/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c is not completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
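
/* As a hypothetical illustration (not taken from any real machine
   description), combining

	(set (reg 60) (plus (reg 58) (const_int 4)))
	(set (reg 61) (mem (reg 60)))

   would substitute the first insn's source into the second, giving

	(set (reg 61) (mem (plus (reg 58) (const_int 4))))

   which is installed, and the first insn deleted, only if (reg 60)
   is not needed afterward and the target recognizes the combined
   memory address.  */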

#include "config.h"
#include "gvarargs.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include <stdio.h>

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
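
/* For example (hypothetical numbers): if three insns appear in the
   order A, B, C, their cuids are 1, 2 and 3 even if their uids are
   7, 3 and 9, so comparing INSN_CUIDs tells which insn comes first.  */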

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static short *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static short *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static short label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the significant
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static HOST_WIDE_INT *reg_significant;
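
/* For instance (a hypothetical illustration): on a machine whose byte
   loads zero extend, a pseudo that is only ever loaded from QImode
   memory would have 0xff recorded here; a later
   (and:SI (reg:SI N) (const_int 255)) applied to it is then known
   to be redundant.  */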

/* Mode used to compute significance in reg_significant.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode significant_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_significant and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int significant_valid;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx rtx; int i;} old_contents;
  union {rtx *rtx; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);  \
      if (undobuf.num_undo < MAX_UNDO)  \
	{  \
	  undobuf.undo[undobuf.num_undo].is_int = 0;  \
	  undobuf.undo[undobuf.num_undo].where.rtx = &INTO;  \
	  undobuf.undo[undobuf.num_undo].old_contents.rtx = INTO;  \
	  INTO = _new;  \
	  if (undobuf.undo[undobuf.num_undo].old_contents.rtx != INTO)  \
	    undobuf.num_undo++;  \
	}  \
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)  \
	{  \
	  undobuf.undo[undobuf.num_undo].is_int = 1;  \
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO;  \
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO;  \
	  INTO = NEWVAL;  \
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO)  \
	    undobuf.num_undo++;  \
	}  \
    } while (0)
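
/* A usage sketch (hypothetical): to tentatively rewrite the source of
   a SET one would do

	SUBST (SET_SRC (pat), new_src);

   and, if the combination is later rejected, call undo_all () to
   restore every location recorded in undobuf.  */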

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void set_significant ();
static void move_deaths ();
rtx remove_death ();
static void record_value_for_reg ();
static void record_dead_and_set_regs ();
static int use_crosses_set_p ();
static rtx try_combine ();
static rtx *find_split_point ();
static rtx subst ();
static void undo_all ();
static int reg_dead_at_p ();
static rtx expand_compound_operation ();
static rtx expand_field_assignment ();
static rtx make_extraction ();
static int get_pos_from_mask ();
static rtx force_to_mode ();
static rtx known_cond ();
static rtx make_field_assignment ();
static rtx make_compound_operation ();
static rtx apply_distributive_law ();
static rtx simplify_and_const_int ();
static unsigned HOST_WIDE_INT significant_bits ();
static int num_sign_bit_copies ();
static int merge_outer_ops ();
static rtx simplify_shift_const ();
static int recog_for_combine ();
static rtx gen_lowpart_for_combine ();
static rtx gen_rtx_combine ();
static rtx gen_binary ();
static rtx gen_unary ();
static enum rtx_code simplify_comparison ();
static int reversible_comparison_p ();
static int get_last_value_validate ();
static rtx get_last_value ();
static void distribute_notes ();
static void distribute_links ();
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (short *) alloca (nregs * sizeof (short));
  reg_last_set_label = (short *) alloca (nregs * sizeof (short));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_significant = (HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (short));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero (reg_significant, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  significant_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_significant when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  significant_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are significant for some registers.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	note_stores (PATTERN (insn), set_significant);
    }

  significant_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_CODE (insn) == INSN
	       || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  significant_valid = 0;
}
\f
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are significant.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_significant (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	return;

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);
      if (SET_DEST (set) == x)
	{
	  reg_significant[REGNO (x)]
	    |= significant_bits (SET_SRC (set), significant_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_significant[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't install a subreg involving two modes not tieable.
	 It can worsen register allocation, and can even make invalid reload
	 insns, since the reg inside may need to be copied from in the
	 outside mode, and that may be invalid if it is an fp reg copied in
	 integer mode.  As a special exception, we can allow this if
	 I3 is simply copying DEST, a REG, to CC0.  */
      || (GET_CODE (src) == SUBREG
	  && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
#ifdef HAVE_cc0
	  && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
		&& SET_DEST (PATTERN (i3)) == cc0_rtx
		&& GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
#endif
	  )
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  Also, don't move a volatile asm across any other insns.  */
      || (! all_adjacent
	  && (use_crosses_set_p (src, INSN_CUID (insn))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
#ifdef SMALL_REGISTER_CLASSES
	      /* Don't extend the life of a hard register.  */
	      || REGNO (src) < FIRST_PSEUDO_REGISTER
#else
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
#endif
	      ))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
	      && GET_CODE (src) != CALL
#else
	      && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
				       GET_MODE (inner_dest))
#endif
	      )

	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3)))
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   If we created two insns, return I2; otherwise return I3.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */
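
  /* For instance (a hypothetical illustration), if I2 is

	(parallel [(set (reg 70) (div:SI ...))
		   (set (reg 71) (mod:SI ...))])

     and I3 is

	(set (mem:SI (reg 64)) (reg 71)),

     rewriting I2's second SET to store the remainder directly into
     (mem:SI (reg 64)) saves the copy, provided (reg 71) dies in I3.  */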

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	  (parallel [(set (mem (reg 69)) ...)
		     (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
			XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	    || (i1 != 0
		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
      {
	undo_all ();
	return 0;
      }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
      if (i1)
	{
	  subst_low_cuid = INSN_CUID (i1);
	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	}
      else
	{
	  subst_low_cuid = INSN_CUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	}

      previous_num_undos = undobuf.num_undo;
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx *cc_use;
      enum machine_mode compare_mode;

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx (REG, compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[regno] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_combine (COMPARE, compare_mode,
				      i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;	/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      previous_num_undos = undobuf.num_undo;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
	 above (see detailed comments there) that ensures that I1DEST
	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
			      0, NULL_PTR))
	{
	  undo_all ();
	  return 0;
	}

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      previous_num_undos = undobuf.num_undo;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
	      > 1))
      /* Fail if we tried to make a new register (we used to abort, but there's
	 really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER)
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
	{
	  rtvec old = XVEC (newpat, 0);
	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
		 sizeof (old->elem[0]) * old->num_elem);
	}
      else
	{
	  rtx old = newpat;
	  total_sets = 1 + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  XVECEXP (newpat, 0, 0) = old;
	}

      if (added_sets_1)
	XVECEXP (newpat, 0, --total_sets)
	  = (GET_CODE (PATTERN (i1)) == PARALLEL
	     ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
	{
	  /* If there is no I1, use I2's body as is.  We used to also not do
	     the subst call below if I2 was substituted into I3,
	     but that could lose a simplification.  */
	  if (i1 == 0)
	    XVECEXP (newpat, 0, --total_sets) = i2pat;
	  else
	    /* See comment where i2pat is assigned.  */
	    XVECEXP (newpat, 0, --total_sets)
	      = subst (i2pat, i1dest, i1src, 0, 0);
	}
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.  Also check the case where the first
     SET's destination is unused.  That would not cause incorrect code, but
     does cause an unneeded insn to remain.  */

  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
      && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 0);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
	   && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 1);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* See if this is an XOR.  If so, perhaps the problem is that the
     constant is out of range.  Replace it with a complemented XOR with
     a complemented constant; it might be in range.  */

  else if (insn_code_number < 0 && GET_CODE (newpat) == SET
	   && GET_CODE (SET_SRC (newpat)) == XOR
	   && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
	   && ((temp = simplify_unary_operation (NOT,
						 GET_MODE (SET_SRC (newpat)),
						 XEXP (SET_SRC (newpat), 1),
						 GET_MODE (SET_SRC (newpat))))
	       != 0))
    {
      enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
      rtx pat
	= gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
			   gen_unary (NOT, i_mode,
				      gen_binary (XOR, i_mode,
						  XEXP (SET_SRC (newpat), 0),
						  temp)));

      insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
      if (insn_code_number >= 0)
	newpat = pat;
    }
1520
1521 /* If we were combining three insns and the result is a simple SET
1522 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1523 insns. There are two ways to do this. It can be split using a
1524 machine-specific method (like when you have an addition of a large
1525 constant) or by combine in the function find_split_point. */
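/* E.g., a machine that cannot add a full 32-bit constant in one insn
might split (set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 0x12345678)))
into a load of the high part of the constant followed by an add of
the low part. (This constant is invented for illustration.) */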
1526
1527 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1528 && asm_noperands (newpat) < 0)
1529 {
1530 rtx m_split, *split;
1531 rtx ni2dest = i2dest;
1532
1533 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1534 use I2DEST as a scratch register will help. In the latter case,
1535 convert I2DEST to the mode of the source of NEWPAT if we can. */
1536
1537 m_split = split_insns (newpat, i3);
1538 if (m_split == 0)
1539 {
1540 /* If I2DEST is a hard register or the only use of a pseudo,
1541 we can change its mode. */
1542 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1543 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1544 && GET_CODE (i2dest) == REG
1545 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1546 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1547 && ! REG_USERVAR_P (i2dest))))
1548 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1549 REGNO (i2dest));
1550
1551 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1552 gen_rtvec (2, newpat,
1553 gen_rtx (CLOBBER,
1554 VOIDmode,
1555 ni2dest))),
1556 i3);
1557 }
1558
1559 if (m_split && GET_CODE (m_split) == SEQUENCE
1560 && XVECLEN (m_split, 0) == 2
1561 && (next_real_insn (i2) == i3
1562 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1563 INSN_CUID (i2))))
1564 {
1565 rtx i2set, i3set;
1566 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1567 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1568
1569 i3set = single_set (XVECEXP (m_split, 0, 1));
1570 i2set = single_set (XVECEXP (m_split, 0, 0));
1571
1572 /* In case we changed the mode of I2DEST, replace it in the
1573 pseudo-register table here. We can't do it above in case this
1574 code doesn't get executed and we do a split the other way. */
1575
1576 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1577 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1578
1579 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1580
1581 /* If I2 or I3 has multiple SETs, we won't know how to track
1582 register status, so don't use these insns. */
1583
1584 if (i2_code_number >= 0 && i2set && i3set)
1585 insn_code_number = recog_for_combine (&newi3pat, i3,
1586 &new_i3_notes);
1587
1588 if (insn_code_number >= 0)
1589 newpat = newi3pat;
1590
1591 /* It is possible that both insns now set the destination of I3.
1592 If so, we must show an extra use of it. */
1593
1594 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1595 && GET_CODE (SET_DEST (i2set)) == REG
1596 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1597 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1598 }
1599
1600 /* If we can split it and use I2DEST, go ahead and see if that
1601 helps things be recognized. Verify that none of the registers
1602 are set between I2 and I3. */
1603 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1604 #ifdef HAVE_cc0
1605 && GET_CODE (i2dest) == REG
1606 #endif
1607 /* We need I2DEST in the proper mode. If it is a hard register
1608 or the only use of a pseudo, we can change its mode. */
1609 && (GET_MODE (*split) == GET_MODE (i2dest)
1610 || GET_MODE (*split) == VOIDmode
1611 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1612 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1613 && ! REG_USERVAR_P (i2dest)))
1614 && (next_real_insn (i2) == i3
1615 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1616 /* We can't overwrite I2DEST if its value is still used by
1617 NEWPAT. */
1618 && ! reg_referenced_p (i2dest, newpat))
1619 {
1620 rtx newdest = i2dest;
1621
1622 /* Get NEWDEST as a register in the proper mode. We have already
1623 validated that we can do this. */
1624 if (GET_MODE (i2dest) != GET_MODE (*split)
1625 && GET_MODE (*split) != VOIDmode)
1626 {
1627 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1628
1629 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1630 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1631 }
1632
1633 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1634 an ASHIFT. This can occur if it was inside a PLUS and hence
1635 appeared to be a memory address. This is a kludge. */
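/* E.g., (mult:SI (reg:SI 100) (const_int 8)) becomes
(ashift:SI (reg:SI 100) (const_int 3)), since 8 is 2**3. */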
1636 if (GET_CODE (*split) == MULT
1637 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1638 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1639 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
1640 XEXP (*split, 0), GEN_INT (i)));
1641
1642 #ifdef INSN_SCHEDULING
1643 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1644 be written as a ZERO_EXTEND. */
1645 if (GET_CODE (*split) == SUBREG
1646 && GET_CODE (SUBREG_REG (*split)) == MEM)
1647 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1648 XEXP (*split, 0)));
1649 #endif
1650
1651 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1652 SUBST (*split, newdest);
1653 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1654 if (i2_code_number >= 0)
1655 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1656 }
1657 }
1658
1659 /* Check for a case where we loaded from memory in a narrow mode and
1660 then sign extended it, but we need both registers. In that case,
1661 we have a PARALLEL with both loads from the same memory location.
1662 We can split this into a load from memory followed by a register-register
1663 copy. This saves at least one insn, more if register allocation can
1664 eliminate the copy. */
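/* Concretely (with invented registers), the unrecognized pattern might be
(parallel [(set (reg:SI 100) (sign_extend:SI (mem:HI (reg:SI 99))))
(set (reg:HI 101) (mem:HI (reg:SI 99)))])
which we rewrite as the extending load alone in I2 followed by a
register-register copy of the low part in I3. */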
1665
1666 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1667 && GET_CODE (newpat) == PARALLEL
1668 && XVECLEN (newpat, 0) == 2
1669 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1670 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1671 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1672 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1673 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1674 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1675 INSN_CUID (i2))
1676 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1677 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1678 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1679 SET_SRC (XVECEXP (newpat, 0, 1)))
1680 && ! find_reg_note (i3, REG_UNUSED,
1681 SET_DEST (XVECEXP (newpat, 0, 0))))
1682 {
1683 rtx ni2dest;
1684
1685 newi2pat = XVECEXP (newpat, 0, 0);
1686 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1687 newpat = XVECEXP (newpat, 0, 1);
1688 SUBST (SET_SRC (newpat),
1689 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1690 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1691 if (i2_code_number >= 0)
1692 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1693
1694 if (insn_code_number >= 0)
1695 {
1696 rtx insn;
1697 rtx link;
1698
1699 /* If we will be able to accept this, we have made a change to the
1700 destination of I3. This can invalidate a LOG_LINKS pointing
1701 to I3. No other part of combine.c makes such a transformation.
1702
1703 The new I3 will have a destination that was previously the
1704 destination of I1 or I2 and which was used in I2 or I3. Call
1705 distribute_links to make a LOG_LINK from the next use of
1706 that destination. */
1707
1708 PATTERN (i3) = newpat;
1709 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1710
1711 /* I3 now uses what used to be its destination and which is
1712 now I2's destination. That means we need a LOG_LINK from
1713 I3 to I2. But we used to have one, so we still will.
1714
1715 However, some later insn might be using I2's dest and have
1716 a LOG_LINK pointing at I3. We must remove this link.
1717 The simplest way to remove the link is to point it at I1,
1718 which we know will be a NOTE. */
1719
1720 for (insn = NEXT_INSN (i3);
1721 insn && GET_CODE (insn) != CODE_LABEL
1722 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1723 insn = NEXT_INSN (insn))
1724 {
1725 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1726 && reg_referenced_p (ni2dest, PATTERN (insn)))
1727 {
1728 for (link = LOG_LINKS (insn); link;
1729 link = XEXP (link, 1))
1730 if (XEXP (link, 0) == i3)
1731 XEXP (link, 0) = i1;
1732
1733 break;
1734 }
1735 }
1736 }
1737 }
1738
1739 /* Similarly, check for a case where we have a PARALLEL of two independent
1740 SETs but we started with three insns. In this case, we can do the sets
1741 as two separate insns. This case occurs when some SET allows two
1742 other insns to combine, but the destination of that SET is still live. */
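/* E.g., combining might leave
(parallel [(set (reg:SI 100) (plus:SI (reg:SI 98) (reg:SI 99)))
(set (reg:SI 101) (minus:SI (reg:SI 98) (reg:SI 99)))])
where neither SET mentions the other's destination; the second SET then
becomes the new I2 and the first the new I3. (Registers invented.) */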
1743
1744 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1745 && GET_CODE (newpat) == PARALLEL
1746 && XVECLEN (newpat, 0) == 2
1747 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1748 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1749 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1750 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1751 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1752 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1753 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1754 INSN_CUID (i2))
1755 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1756 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1757 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1758 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1759 XVECEXP (newpat, 0, 0))
1760 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1761 XVECEXP (newpat, 0, 1)))
1762 {
1763 newi2pat = XVECEXP (newpat, 0, 1);
1764 newpat = XVECEXP (newpat, 0, 0);
1765
1766 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1767 if (i2_code_number >= 0)
1768 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1769 }
1770
1771 /* If it still isn't recognized, fail and change things back the way they
1772 were. */
1773 if ((insn_code_number < 0
1774 /* Is the result a reasonable ASM_OPERANDS? */
1775 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1776 {
1777 undo_all ();
1778 return 0;
1779 }
1780
1781 /* If we had to change another insn, make sure it is valid also. */
1782 if (undobuf.other_insn)
1783 {
1784 rtx other_notes = REG_NOTES (undobuf.other_insn);
1785 rtx other_pat = PATTERN (undobuf.other_insn);
1786 rtx new_other_notes;
1787 rtx note, next;
1788
1789 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1790 &new_other_notes);
1791
1792 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1793 {
1794 undo_all ();
1795 return 0;
1796 }
1797
1798 PATTERN (undobuf.other_insn) = other_pat;
1799
1800 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1801 are still valid. Then add any non-duplicate notes added by
1802 recog_for_combine. */
1803 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1804 {
1805 next = XEXP (note, 1);
1806
1807 if (REG_NOTE_KIND (note) == REG_UNUSED
1808 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1809 {
1810 if (GET_CODE (XEXP (note, 0)) == REG)
1811 reg_n_deaths[REGNO (XEXP (note, 0))]--;
1812
1813 remove_note (undobuf.other_insn, note);
1814 }
1815 }
1816
1817 for (note = new_other_notes; note; note = XEXP (note, 1))
1818 if (GET_CODE (XEXP (note, 0)) == REG)
1819 reg_n_deaths[REGNO (XEXP (note, 0))]++;
1820
1821 distribute_notes (new_other_notes, undobuf.other_insn,
1822 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
1823 }
1824
1825 /* We now know that we can do this combination. Merge the insns and
1826 update the status of registers and LOG_LINKS. */
1827
1828 {
1829 rtx i3notes, i2notes, i1notes = 0;
1830 rtx i3links, i2links, i1links = 0;
1831 rtx midnotes = 0;
1832 int all_adjacent = (next_real_insn (i2) == i3
1833 && (i1 == 0 || next_real_insn (i1) == i2));
1834 register int regno;
1835 /* Compute which registers we expect to eliminate. */
1836 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1837 ? 0 : i2dest);
1838 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1839
1840 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1841 clear them. */
1842 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1843 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1844 if (i1)
1845 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1846
1847 /* Ensure that we do not have something that should not be shared but
1848 occurs multiple times in the new insns. Check this by first
1849 resetting all the `used' flags and then copying anything that is shared. */
1850
1851 reset_used_flags (i3notes);
1852 reset_used_flags (i2notes);
1853 reset_used_flags (i1notes);
1854 reset_used_flags (newpat);
1855 reset_used_flags (newi2pat);
1856 if (undobuf.other_insn)
1857 reset_used_flags (PATTERN (undobuf.other_insn));
1858
1859 i3notes = copy_rtx_if_shared (i3notes);
1860 i2notes = copy_rtx_if_shared (i2notes);
1861 i1notes = copy_rtx_if_shared (i1notes);
1862 newpat = copy_rtx_if_shared (newpat);
1863 newi2pat = copy_rtx_if_shared (newi2pat);
1864 if (undobuf.other_insn)
1865 reset_used_flags (PATTERN (undobuf.other_insn));
1866
1867 INSN_CODE (i3) = insn_code_number;
1868 PATTERN (i3) = newpat;
1869 if (undobuf.other_insn)
1870 INSN_CODE (undobuf.other_insn) = other_code_number;
1871
1872 /* We had one special case above where I2 had more than one set and
1873 we replaced a destination of one of those sets with the destination
1874 of I3. In that case, we have to update LOG_LINKS of insns later
1875 in this basic block. Note that this (expensive) case is rare. */
1876
1877 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1878 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1879 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1880 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
1881 && ! find_reg_note (i2, REG_UNUSED,
1882 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
1883 {
1884 register rtx insn;
1885
1886 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
1887 {
1888 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1889 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1890 if (XEXP (link, 0) == i2)
1891 XEXP (link, 0) = i3;
1892
1893 if (GET_CODE (insn) == CODE_LABEL
1894 || GET_CODE (insn) == JUMP_INSN)
1895 break;
1896 }
1897 }
1898
1899 LOG_LINKS (i3) = 0;
1900 REG_NOTES (i3) = 0;
1901 LOG_LINKS (i2) = 0;
1902 REG_NOTES (i2) = 0;
1903
1904 if (newi2pat)
1905 {
1906 INSN_CODE (i2) = i2_code_number;
1907 PATTERN (i2) = newi2pat;
1908 }
1909 else
1910 {
1911 PUT_CODE (i2, NOTE);
1912 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
1913 NOTE_SOURCE_FILE (i2) = 0;
1914 }
1915
1916 if (i1)
1917 {
1918 LOG_LINKS (i1) = 0;
1919 REG_NOTES (i1) = 0;
1920 PUT_CODE (i1, NOTE);
1921 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
1922 NOTE_SOURCE_FILE (i1) = 0;
1923 }
1924
1925 /* Get death notes for everything that is now used in either I3 or
1926 I2 and used to die in a previous insn. */
1927
1928 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
1929 if (newi2pat)
1930 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
1931
1932 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
1933 if (i3notes)
1934 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
1935 elim_i2, elim_i1);
1936 if (i2notes)
1937 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
1938 elim_i2, elim_i1);
1939 if (i1notes)
1940 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
1941 elim_i2, elim_i1);
1942 if (midnotes)
1943 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1944 elim_i2, elim_i1);
1945
1946 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
1947 know these are REG_UNUSED and want them to go to the desired insn,
1948 so we always pass it as i3. We have not counted the notes in
1949 reg_n_deaths yet, so we need to do so now. */
1950
1951 if (newi2pat && new_i2_notes)
1952 {
1953 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
1954 if (GET_CODE (XEXP (temp, 0)) == REG)
1955 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
1956
1957 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1958 }
1959
1960 if (new_i3_notes)
1961 {
1962 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
1963 if (GET_CODE (XEXP (temp, 0)) == REG)
1964 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
1965
1966 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
1967 }
1968
1969 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1970 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
1971 Show an additional death due to the REG_DEAD note we make here. If
1972 we discard it in distribute_notes, we will decrement it again. */
1973
1974 if (i3dest_killed)
1975 {
1976 if (GET_CODE (i3dest_killed) == REG)
1977 reg_n_deaths[REGNO (i3dest_killed)]++;
1978
1979 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
1980 NULL_RTX),
1981 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1982 NULL_RTX, NULL_RTX);
1983 }
1984
1985 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
1986 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
1987 we passed I3 in that case, it might delete I2. */
1988
1989 if (i2dest_in_i2src)
1990 {
1991 if (GET_CODE (i2dest) == REG)
1992 reg_n_deaths[REGNO (i2dest)]++;
1993
1994 if (newi2pat && reg_set_p (i2dest, newi2pat))
1995 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1996 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1997 else
1998 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1999 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2000 NULL_RTX, NULL_RTX);
2001 }
2002
2003 if (i1dest_in_i1src)
2004 {
2005 if (GET_CODE (i1dest) == REG)
2006 reg_n_deaths[REGNO (i1dest)]++;
2007
2008 if (newi2pat && reg_set_p (i1dest, newi2pat))
2009 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2010 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2011 else
2012 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2013 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2014 NULL_RTX, NULL_RTX);
2015 }
2016
2017 distribute_links (i3links);
2018 distribute_links (i2links);
2019 distribute_links (i1links);
2020
2021 if (GET_CODE (i2dest) == REG)
2022 {
2023 rtx link;
2024 rtx i2_insn = 0, i2_val = 0, set;
2025
2026 /* The insn that used to set this register doesn't exist, and
2027 this life of the register may not exist either. See if one of
2028 I3's links points to an insn that sets I2DEST. If it does,
2029 that is now the last known value for I2DEST. If we don't update
2030 this and I2 set the register to a value that depended on its old
2031 contents, we will get confused. If this insn is used, things
2032 will be set correctly in combine_instructions. */
2033
2034 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2035 if ((set = single_set (XEXP (link, 0))) != 0
2036 && rtx_equal_p (i2dest, SET_DEST (set)))
2037 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2038
2039 record_value_for_reg (i2dest, i2_insn, i2_val);
2040
2041 /* If the reg formerly set in I2 died only once and that was in I3,
2042 zero its use count so it won't make `reload' do any work. */
2043 if (! added_sets_2 && newi2pat == 0)
2044 {
2045 regno = REGNO (i2dest);
2046 reg_n_sets[regno]--;
2047 if (reg_n_sets[regno] == 0
2048 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2049 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2050 reg_n_refs[regno] = 0;
2051 }
2052 }
2053
2054 if (i1 && GET_CODE (i1dest) == REG)
2055 {
2056 rtx link;
2057 rtx i1_insn = 0, i1_val = 0, set;
2058
2059 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2060 if ((set = single_set (XEXP (link, 0))) != 0
2061 && rtx_equal_p (i1dest, SET_DEST (set)))
2062 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2063
2064 record_value_for_reg (i1dest, i1_insn, i1_val);
2065
2066 regno = REGNO (i1dest);
2067 if (! added_sets_1)
2068 {
2069 reg_n_sets[regno]--;
2070 if (reg_n_sets[regno] == 0
2071 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2072 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2073 reg_n_refs[regno] = 0;
2074 }
2075 }
2076
2077 /* Update reg_significant et al for any changes that may have been made
2078 to this insn. */
2079
2080 note_stores (newpat, set_significant);
2081 if (newi2pat)
2082 note_stores (newi2pat, set_significant);
2083
2084 /* If I3 is now an unconditional jump, ensure that it has a
2085 BARRIER following it since it may have initially been a
2086 conditional jump. */
2087
2088 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2089 && GET_CODE (next_nonnote_insn (i3)) != BARRIER)
2090 emit_barrier_after (i3);
2091 }
2092
2093 combine_successes++;
2094
2095 return newi2pat ? i2 : i3;
2096 }
2097 \f
2098 /* Undo all the modifications recorded in undobuf. */
2099
2100 static void
2101 undo_all ()
2102 {
2103 register int i;
2104 if (undobuf.num_undo > MAX_UNDO)
2105 undobuf.num_undo = MAX_UNDO;
2106 for (i = undobuf.num_undo - 1; i >= 0; i--)
2107 {
2108 if (undobuf.undo[i].is_int)
2109 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2110 else
2111 *undobuf.undo[i].where.rtx = undobuf.undo[i].old_contents.rtx;
2112
2113 }
2114
2115 obfree (undobuf.storage);
2116 undobuf.num_undo = 0;
2117 }
2118 \f
2119 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2120 where we have an arithmetic expression and return that point. LOC will
2121 be inside INSN.
2122
2123 try_combine will call this function to see if an insn can be split into
2124 two insns. */
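/* For example, given *LOC ==
(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 1000000))),
a plausible split point is the large constant, so that it can be
computed by a separate insn. The exact choice is machine-dependent;
this example is only illustrative. */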
2125
2126 static rtx *
2127 find_split_point (loc, insn)
2128 rtx *loc;
2129 rtx insn;
2130 {
2131 rtx x = *loc;
2132 enum rtx_code code = GET_CODE (x);
2133 rtx *split;
2134 int len = 0, pos, unsignedp;
2135 rtx inner;
2136
2137 /* First special-case some codes. */
2138 switch (code)
2139 {
2140 case SUBREG:
2141 #ifdef INSN_SCHEDULING
2142 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2143 point. */
2144 if (GET_CODE (SUBREG_REG (x)) == MEM)
2145 return loc;
2146 #endif
2147 return find_split_point (&SUBREG_REG (x), insn);
2148
2149 case MEM:
2150 #ifdef HAVE_lo_sum
2151 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2152 using LO_SUM and HIGH. */
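/* I.e., (mem (symbol_ref "x")) would become
(mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))
and the split point is the HIGH subexpression. */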
2153 if (GET_CODE (XEXP (x, 0)) == CONST
2154 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2155 {
2156 SUBST (XEXP (x, 0),
2157 gen_rtx_combine (LO_SUM, Pmode,
2158 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2159 XEXP (x, 0)));
2160 return &XEXP (XEXP (x, 0), 0);
2161 }
2162 #endif
2163
2164 /* If we have a PLUS whose second operand is a constant and the
2165 address is not valid, perhaps we can split it up using
2166 the machine-specific way to split large constants. We use
2167 the first pseudo-reg (one of the virtual regs) as a placeholder;
2168 it will not remain in the result. */
2169 if (GET_CODE (XEXP (x, 0)) == PLUS
2170 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2171 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2172 {
2173 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2174 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2175 subst_insn);
2176
2177 /* This should have produced two insns, each of which sets our
2178 placeholder. If the source of the second is a valid address,
2179 we can put both sources together and make a split point
2180 in the middle. */
2181
2182 if (seq && XVECLEN (seq, 0) == 2
2183 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2184 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2185 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2186 && ! reg_mentioned_p (reg,
2187 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2188 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2189 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2190 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2191 && memory_address_p (GET_MODE (x),
2192 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2193 {
2194 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2195 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2196
2197 /* Replace the placeholder in SRC2 with SRC1. If we can
2198 find where in SRC2 it was placed, that can become our
2199 split point and we can replace this address with SRC2.
2200 Just try two obvious places. */
2201
2202 src2 = replace_rtx (src2, reg, src1);
2203 split = 0;
2204 if (XEXP (src2, 0) == src1)
2205 split = &XEXP (src2, 0);
2206 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2207 && XEXP (XEXP (src2, 0), 0) == src1)
2208 split = &XEXP (XEXP (src2, 0), 0);
2209
2210 if (split)
2211 {
2212 SUBST (XEXP (x, 0), src2);
2213 return split;
2214 }
2215 }
2216
2217 /* If that didn't work, perhaps the first operand is complex and
2218 needs to be computed separately, so make a split point there.
2219 This will occur on machines that just support REG + CONST
2220 and have a constant moved through some previous computation. */
2221
2222 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2223 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2224 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2225 == 'o')))
2226 return &XEXP (XEXP (x, 0), 0);
2227 }
2228 break;
2229
2230 case SET:
2231 #ifdef HAVE_cc0
2232 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2233 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2234 we need to put the operand into a register. So split at that
2235 point. */
2236
2237 if (SET_DEST (x) == cc0_rtx
2238 && GET_CODE (SET_SRC (x)) != COMPARE
2239 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2240 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2241 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2242 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2243 return &SET_SRC (x);
2244 #endif
2245
2246 /* See if we can split SET_SRC as it stands. */
2247 split = find_split_point (&SET_SRC (x), insn);
2248 if (split && split != &SET_SRC (x))
2249 return split;
2250
2251 /* See if this is a bitfield assignment with everything constant. If
2252 so, this is an IOR of an AND, so split it into that. */
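/* E.g., storing 3 into a 2-bit field at position 4 of (reg:SI 100),
(set (zero_extract:SI (reg:SI 100) (const_int 2) (const_int 4))
(const_int 3)),
becomes (set (reg:SI 100) (ior:SI (reg:SI 100) (const_int 48)))
since the source is all ones; otherwise an AND to clear the old field
is done first. (Values here are invented for illustration.) */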
2253 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2254 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2255 <= HOST_BITS_PER_WIDE_INT)
2256 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2257 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2258 && GET_CODE (SET_SRC (x)) == CONST_INT
2259 && ((INTVAL (XEXP (SET_DEST (x), 1))
2260 + INTVAL (XEXP (SET_DEST (x), 2)))
2261 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2262 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2263 {
2264 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2265 int len = INTVAL (XEXP (SET_DEST (x), 1));
2266 int src = INTVAL (SET_SRC (x));
2267 rtx dest = XEXP (SET_DEST (x), 0);
2268 enum machine_mode mode = GET_MODE (dest);
2269 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2270
2271 #if BITS_BIG_ENDIAN
2272 pos = GET_MODE_BITSIZE (mode) - len - pos;
2273 #endif
2274
2275 if (src == mask)
2276 SUBST (SET_SRC (x),
2277 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2278 else
2279 SUBST (SET_SRC (x),
2280 gen_binary (IOR, mode,
2281 gen_binary (AND, mode, dest,
2282 GEN_INT (~ (mask << pos)
2283 & GET_MODE_MASK (mode))),
2284 GEN_INT (src << pos)));
2285
2286 SUBST (SET_DEST (x), dest);
2287
2288 split = find_split_point (&SET_SRC (x), insn);
2289 if (split && split != &SET_SRC (x))
2290 return split;
2291 }
2292
2293 /* Otherwise, see if this is an operation that we can split into two.
2294 If so, try to split that. */
2295 code = GET_CODE (SET_SRC (x));
2296
2297 switch (code)
2298 {
2299 case AND:
2300 /* If we are AND'ing with a large constant that is only a single
2301 bit and the result is only being used in a context where we
2302 need to know if it is zero or non-zero, replace it with a bit
2303 extraction. This will avoid the large constant, which might
2304 have taken more than one insn to make. If the constant were
2305 not a valid argument to the AND but took only one insn to make,
2306 this is no worse, but if it took more than one insn, it will
2307 be better. */
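/* E.g., if (and:SI (reg:SI 100) (const_int 0x8000)) is only tested
against zero by an EQ or NE, it can become a one-bit zero_extract at
bit 15, avoiding the large constant. (Register invented.) */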
2308
2309 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2310 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2311 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2312 && GET_CODE (SET_DEST (x)) == REG
2313 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2314 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2315 && XEXP (*split, 0) == SET_DEST (x)
2316 && XEXP (*split, 1) == const0_rtx)
2317 {
2318 SUBST (SET_SRC (x),
2319 make_extraction (GET_MODE (SET_DEST (x)),
2320 XEXP (SET_SRC (x), 0),
2321 pos, NULL_RTX, 1, 1, 0, 0));
2322 return find_split_point (loc, insn);
2323 }
2324 break;
2325
2326 case SIGN_EXTEND:
2327 inner = XEXP (SET_SRC (x), 0);
2328 pos = 0;
2329 len = GET_MODE_BITSIZE (GET_MODE (inner));
2330 unsignedp = 0;
2331 break;
2332
2333 case SIGN_EXTRACT:
2334 case ZERO_EXTRACT:
2335 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2336 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2337 {
2338 inner = XEXP (SET_SRC (x), 0);
2339 len = INTVAL (XEXP (SET_SRC (x), 1));
2340 pos = INTVAL (XEXP (SET_SRC (x), 2));
2341
2342 #if BITS_BIG_ENDIAN
2343 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2344 #endif
2345 unsignedp = (code == ZERO_EXTRACT);
2346 }
2347 break;
2348 }
2349
2350 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2351 {
2352 enum machine_mode mode = GET_MODE (SET_SRC (x));
2353
2354 /* For unsigned, we have a choice of a shift followed by an
2355 AND or two shifts. Use two shifts for field sizes where the
2356 constant might be too large. We assume here that we can
2357 always at least get 8-bit constants in an AND insn, which is
2358 true for every current RISC. */
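/* E.g., an unsigned 4-bit field at bit 3 can be
(and:SI (lshiftrt:SI (reg:SI 100) (const_int 3)) (const_int 15)),
while a 24-bit field uses two shifts because the mask 0xffffff might
not fit in an AND immediate. (Sizes invented for illustration.) */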
2359
2360 if (unsignedp && len <= 8)
2361 {
2362 SUBST (SET_SRC (x),
2363 gen_rtx_combine
2364 (AND, mode,
2365 gen_rtx_combine (LSHIFTRT, mode,
2366 gen_lowpart_for_combine (mode, inner),
2367 GEN_INT (pos)),
2368 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2369
2370 split = find_split_point (&SET_SRC (x), insn);
2371 if (split && split != &SET_SRC (x))
2372 return split;
2373 }
2374 else
2375 {
2376 SUBST (SET_SRC (x),
2377 gen_rtx_combine
2378 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2379 gen_rtx_combine (ASHIFT, mode,
2380 gen_lowpart_for_combine (mode, inner),
2381 GEN_INT (GET_MODE_BITSIZE (mode)
2382 - len - pos)),
2383 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2384
2385 split = find_split_point (&SET_SRC (x), insn);
2386 if (split && split != &SET_SRC (x))
2387 return split;
2388 }
2389 }
2390
2391 /* See if this is a simple operation with a constant as the second
2392 operand. It might be that this constant is out of range and hence
2393 could be used as a split point. */
2394 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2395 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2396 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2397 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2398 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2399 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2400 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2401 == 'o'))))
2402 return &XEXP (SET_SRC (x), 1);
2403
2404 /* Finally, see if this is a simple operation with its first operand
2405 not in a register. The operation might require this operand in a
2406 register, so return it as a split point. We can always do this
2407 because if the first operand were another operation, we would have
2408 already found it as a split point. */
2409 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2410 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2411 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2412 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2413 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2414 return &XEXP (SET_SRC (x), 0);
2415
2416 return 0;
2417
2418 case AND:
2419 case IOR:
2420 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2421 it is better to write this as (not (ior A B)) so we can split it.
2422 Similarly for IOR. */
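/* I.e., (and (not A) (not B)) is rewritten as (not (ior A B)), and
(ior (not A) (not B)) as (not (and A B)). */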
2423 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2424 {
2425 SUBST (*loc,
2426 gen_rtx_combine (NOT, GET_MODE (x),
2427 gen_rtx_combine (code == IOR ? AND : IOR,
2428 GET_MODE (x),
2429 XEXP (XEXP (x, 0), 0),
2430 XEXP (XEXP (x, 1), 0))));
2431 return find_split_point (loc, insn);
2432 }
2433
2434 /* Many RISC machines have a large set of logical insns. If the
2435 second operand is a NOT, put it first so we will try to split the
2436 other operand first. */
2437 if (GET_CODE (XEXP (x, 1)) == NOT)
2438 {
2439 rtx tem = XEXP (x, 0);
2440 SUBST (XEXP (x, 0), XEXP (x, 1));
2441 SUBST (XEXP (x, 1), tem);
2442 }
2443 break;
2444 }
2445
2446 /* Otherwise, select our actions depending on our rtx class. */
2447 switch (GET_RTX_CLASS (code))
2448 {
2449 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2450 case '3':
2451 split = find_split_point (&XEXP (x, 2), insn);
2452 if (split)
2453 return split;
2454 /* ... fall through ... */
2455 case '2':
2456 case 'c':
2457 case '<':
2458 split = find_split_point (&XEXP (x, 1), insn);
2459 if (split)
2460 return split;
2461 /* ... fall through ... */
2462 case '1':
2463 /* Some machines have (and (shift ...) ...) insns. If X is not
2464 an AND, but XEXP (X, 0) is, use it as our split point. */
2465 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2466 return &XEXP (x, 0);
2467
2468 split = find_split_point (&XEXP (x, 0), insn);
2469 if (split)
2470 return split;
2471 return loc;
2472 }
2473
2474 /* Otherwise, we don't have a split point. */
2475 return 0;
2476 }
2477 \f
2478 /* Throughout X, replace FROM with TO, and return the result.
2479 The result is TO if X is FROM;
2480 otherwise the result is X, but its contents may have been modified.
2481 If they were modified, a record was made in undobuf so that
2482 undo_all will (among other things) return X to its original state.
2483
2484 If the number of changes necessary is too great to record,
2485 the excess changes are not made, so the result is invalid.
2486 The changes already made can still be undone.
2487 undobuf.num_undo is incremented for such changes, so by testing that,
2488 the caller can tell whether the result is valid.
2489
2490 `n_occurrences' is incremented each time FROM is replaced.
2491
2492 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2493
2494 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2495 by copying if `n_occurrences' is non-zero. */
2496
2497 static rtx
2498 subst (x, from, to, in_dest, unique_copy)
2499 register rtx x, from, to;
2500 int in_dest;
2501 int unique_copy;
2502 {
2503 register char *fmt;
2504 register int len, i;
2505 register enum rtx_code code = GET_CODE (x), orig_code = code;
2506 rtx temp;
2507 enum machine_mode mode = GET_MODE (x);
2508 enum machine_mode op0_mode = VOIDmode;
2509 rtx other_insn;
2510 rtx *cc_use;
2511 int n_restarts = 0;
2512
2513 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2514 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2515 If it is 0, that cannot be done. We can now do this for any MEM
2516 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2517 If not for that, MEMs would very rarely be safe. */
2518
2519 /* Reject MODEs bigger than a word, because we might not be able
2520 to reference a two-register group starting with an arbitrary register
2521 (and currently gen_lowpart might crash for a SUBREG). */
2522
2523 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2524 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2525
2526 /* Two expressions are equal if they are identical copies of a shared
2527 RTX or if they are both registers with the same register number
2528 and mode. */
2529
2530 #define COMBINE_RTX_EQUAL_P(X,Y) \
2531 ((X) == (Y) \
2532 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2533 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2534
2535 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2536 {
2537 n_occurrences++;
2538 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2539 }
2540
2541 /* If X and FROM are the same register but different modes, they will
2542 not have been seen as equal above. However, flow.c will make a
2543 LOG_LINKS entry for that case. If we do nothing, we will try to
2544 rerecognize our original insn and, when it succeeds, we will
2545 delete the feeding insn, which is incorrect.
2546
2547 So force this insn not to match in this (rare) case. */
2548 if (! in_dest && code == REG && GET_CODE (from) == REG
2549 && REGNO (x) == REGNO (from))
2550 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2551
2552 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2553 of which may contain things that can be combined. */
2554 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2555 return x;
2556
2557 /* It is possible to have a subexpression appear twice in the insn.
2558 Suppose that FROM is a register that appears within TO.
2559 Then, after that subexpression has been scanned once by `subst',
2560 the second time it is scanned, TO may be found. If we were
2561 to scan TO here, we would find FROM within it and create a
2562 self-referential rtl structure which is completely wrong. */
2563 if (COMBINE_RTX_EQUAL_P (x, to))
2564 return to;
2565
2566 len = GET_RTX_LENGTH (code);
2567 fmt = GET_RTX_FORMAT (code);
2568
2569 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2570 set up to skip this common case. All other cases where we want to
2571 suppress replacing something inside a SET_SRC are handled via the
2572 IN_DEST operand. */
2573 if (code == SET
2574 && (GET_CODE (SET_DEST (x)) == REG
2575 || GET_CODE (SET_DEST (x)) == CC0
2576 || GET_CODE (SET_DEST (x)) == PC))
2577 fmt = "ie";
2578
2579 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2580 if (fmt[0] == 'e')
2581 op0_mode = GET_MODE (XEXP (x, 0));
2582
2583 for (i = 0; i < len; i++)
2584 {
2585 if (fmt[i] == 'E')
2586 {
2587 register int j;
2588 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2589 {
2590 register rtx new;
2591 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2592 {
2593 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2594 n_occurrences++;
2595 }
2596 else
2597 {
2598 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2599
2600 /* If this substitution failed, this whole thing fails. */
2601 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2602 return new;
2603 }
2604
2605 SUBST (XVECEXP (x, i, j), new);
2606 }
2607 }
2608 else if (fmt[i] == 'e')
2609 {
2610 register rtx new;
2611
2612 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2613 {
2614 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2615 n_occurrences++;
2616 }
2617 else
2618 /* If we are in a SET_DEST, suppress most cases unless we
2619 have gone inside a MEM, in which case we want to
2620 simplify the address. We assume here that things that
2621 are actually part of the destination have their inner
2622 parts in the first expression. This is true for SUBREG,
2623 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2624 things aside from REG and MEM that should appear in a
2625 SET_DEST. */
2626 new = subst (XEXP (x, i), from, to,
2627 (((in_dest
2628 && (code == SUBREG || code == STRICT_LOW_PART
2629 || code == ZERO_EXTRACT))
2630 || code == SET)
2631 && i == 0), unique_copy);
2632
2633 /* If we found that we will have to reject this combination,
2634 indicate that by returning the CLOBBER ourselves, rather than
2635 an expression containing it. This will speed things up as
2636 well as prevent accidents where two CLOBBERs are considered
2637 to be equal, thus producing an incorrect simplification. */
2638
2639 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2640 return new;
2641
2642 SUBST (XEXP (x, i), new);
2643 }
2644 }
2645
2646 /* We come back to here if we have replaced the expression with one of
2647 a different code and it is likely that further simplification will be
2648 possible. */
2649
2650 restart:
2651
2652 /* If we have restarted more than 4 times, we are probably looping, so
2653 give up. */
2654 if (++n_restarts > 4)
2655 return x;
2656
2657 /* If we are restarting at all, it means that we no longer know the
2658 original mode of operand 0 (since we have probably changed the
2659 form of X). */
2660
2661 if (n_restarts > 1)
2662 op0_mode = VOIDmode;
2663
2664 code = GET_CODE (x);
2665
2666 /* If this is a commutative operation, put a constant last and a complex
2667 expression first. We don't need to do this for comparisons here. */
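/* E.g., (plus (const_int 4) (reg:SI 100)) becomes
(plus (reg:SI 100) (const_int 4)). */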
2668 if (GET_RTX_CLASS (code) == 'c'
2669 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2670 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2671 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2672 || (GET_CODE (XEXP (x, 0)) == SUBREG
2673 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2674 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2675 {
2676 temp = XEXP (x, 0);
2677 SUBST (XEXP (x, 0), XEXP (x, 1));
2678 SUBST (XEXP (x, 1), temp);
2679 }
2680
2681 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2682 sign extension of a PLUS with a constant, reverse the order of the sign
2683 extension and the addition. Note that this is not the same as the original
2684 code, but overflow is undefined for signed values. Also note that the
2685 PLUS will have been partially moved "inside" the sign-extension, so that
2686 the first operand of X will really look like:
2687 (ashiftrt (plus (ashift A C4) C5) C4).
2688 We convert this to
2689 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
2690 and replace the first operand of X with that expression. Later parts
2691 of this function may simplify the expression further.
2692
2693 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2694 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2695 distributive law to produce (plus (mult (sign_extend A) C2) C3).
2696
2697 We do this to simplify address expressions. */
2698
2699 if ((code == PLUS || code == MINUS || code == MULT)
2700 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2701 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2702 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2703 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2704 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2705 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2706 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2707 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2708 XEXP (XEXP (XEXP (x, 0), 0), 1),
2709 XEXP (XEXP (x, 0), 1))) != 0)
2710 {
2711 rtx new
2712 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2713 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2714 INTVAL (XEXP (XEXP (x, 0), 1)));
2715
2716 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2717 INTVAL (XEXP (XEXP (x, 0), 1)));
2718
2719 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2720 }
2721
2722 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2723 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2724 things. Don't deal with operations that change modes here. */
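/* E.g., (plus (if_then_else C A B) (reg:SI 100)) becomes
(if_then_else C (plus A (reg:SI 100)) (plus B (reg:SI 100))),
and each arm may then simplify on its own. */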
2725
2726 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2727 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2728 {
2729 /* Don't do this by using SUBST inside X since we might be messing
2730 up a shared expression. */
2731 rtx cond = XEXP (XEXP (x, 0), 0);
2732 rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2733 XEXP (x, 1)),
2734 pc_rtx, pc_rtx, 0, 0);
2735 rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2736 XEXP (x, 1)),
2737 pc_rtx, pc_rtx, 0, 0);
2738
2739
2740 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2741 goto restart;
2742 }
2743
2744 else if (GET_RTX_CLASS (code) == '1'
2745 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2746 && GET_MODE (XEXP (x, 0)) == mode)
2747 {
2748 rtx cond = XEXP (XEXP (x, 0), 0);
2749 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
2750 pc_rtx, pc_rtx, 0, 0);
2751 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
2752 pc_rtx, pc_rtx, 0, 0);
2753
2754 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2755 goto restart;
2756 }
2757
2758 /* Try to fold this expression in case we have constants that weren't
2759 present before. */
2760 temp = 0;
2761 switch (GET_RTX_CLASS (code))
2762 {
2763 case '1':
2764 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2765 break;
2766 case '<':
2767 temp = simplify_relational_operation (code, op0_mode,
2768 XEXP (x, 0), XEXP (x, 1));
2769 #ifdef FLOAT_STORE_FLAG_VALUE
2770 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2771 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2772 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2773 #endif
2774 break;
2775 case 'c':
2776 case '2':
2777 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2778 break;
2779 case 'b':
2780 case '3':
2781 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2782 XEXP (x, 1), XEXP (x, 2));
2783 break;
2784 }
2785
2786 if (temp)
2787 x = temp, code = GET_CODE (temp);
2788
2789 /* First see if we can apply the inverse distributive law. */
2790 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2791 {
2792 x = apply_distributive_law (x);
2793 code = GET_CODE (x);
2794 }
2795
2796 /* If CODE is an associative operation not otherwise handled, see if we
2797 can associate some operands. This can win if they are constants or
2798 if they are logically related (i.e. (a & b) & a). */
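/* E.g., (plus (plus (reg:SI 100) (const_int 2)) (const_int 3))
reassociates to (plus (reg:SI 100) (const_int 5)); likewise
(and (and A B) A) becomes (and B A), since (and A A) is just A. */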
2799 if ((code == PLUS || code == MINUS
2800 || code == MULT || code == AND || code == IOR || code == XOR
2801 || code == DIV || code == UDIV
2802 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2803 && GET_MODE_CLASS (mode) == MODE_INT)
2804 {
2805 if (GET_CODE (XEXP (x, 0)) == code)
2806 {
2807 rtx other = XEXP (XEXP (x, 0), 0);
2808 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2809 rtx inner_op1 = XEXP (x, 1);
2810 rtx inner;
2811
2812 /* Make sure we pass the constant operand if any as the second
2813 one if this is a commutative operation. */
2814 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2815 {
2816 rtx tem = inner_op0;
2817 inner_op0 = inner_op1;
2818 inner_op1 = tem;
2819 }
2820 inner = simplify_binary_operation (code == MINUS ? PLUS
2821 : code == DIV ? MULT
2822 : code == UDIV ? MULT
2823 : code,
2824 mode, inner_op0, inner_op1);
2825
2826 /* For commutative operations, try the other pair if that one
2827 didn't simplify. */
2828 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2829 {
2830 other = XEXP (XEXP (x, 0), 1);
2831 inner = simplify_binary_operation (code, mode,
2832 XEXP (XEXP (x, 0), 0),
2833 XEXP (x, 1));
2834 }
2835
2836 if (inner)
2837 {
2838 x = gen_binary (code, mode, other, inner);
2839 goto restart;
2840
2841 }
2842 }
2843 }
2844
2845 /* A little bit of algebraic simplification here. */
2846 switch (code)
2847 {
2848 case MEM:
2849 /* Ensure that our address has any ASHIFTs converted to MULT in case
2850 address-recognizing predicates are called later. */
2851 temp = make_compound_operation (XEXP (x, 0), MEM);
2852 SUBST (XEXP (x, 0), temp);
2853 break;
2854
2855 case SUBREG:
2856 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2857 is paradoxical. If we can't do that safely, then it becomes
2858 something nonsensical so that this combination won't take place. */
2859
2860 if (GET_CODE (SUBREG_REG (x)) == MEM
2861 && (GET_MODE_SIZE (mode)
2862 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2863 {
2864 rtx inner = SUBREG_REG (x);
2865 int endian_offset = 0;
2866 /* Don't change the mode of the MEM
2867 if that would change the meaning of the address. */
2868 if (MEM_VOLATILE_P (SUBREG_REG (x))
2869 || mode_dependent_address_p (XEXP (inner, 0)))
2870 return gen_rtx (CLOBBER, mode, const0_rtx);
2871
2872 #if BYTES_BIG_ENDIAN
2873 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2874 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2875 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2876 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2877 #endif
2878 /* Note if the plus_constant doesn't make a valid address
2879 then this combination won't be accepted. */
2880 x = gen_rtx (MEM, mode,
2881 plus_constant (XEXP (inner, 0),
2882 (SUBREG_WORD (x) * UNITS_PER_WORD
2883 + endian_offset)));
2884 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
2885 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
2886 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
2887 return x;
2888 }
2889
2890 /* If we are in a SET_DEST, these other cases can't apply. */
2891 if (in_dest)
2892 return x;
2893
2894 /* Changing mode twice with SUBREG => just change it once,
2895 or not at all if changing back to starting mode. */
2896 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
2897 {
2898 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
2899 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
2900 return SUBREG_REG (SUBREG_REG (x));
2901
2902 SUBST_INT (SUBREG_WORD (x),
2903 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
2904 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
2905 }
2906
2907 /* SUBREG of a hard register => just change the register number
2908 and/or mode. If the hard register is not valid in that mode,
2909 suppress this combination. If the hard register is the stack,
2910 frame, or argument pointer, leave this as a SUBREG. */
2911
2912 if (GET_CODE (SUBREG_REG (x)) == REG
2913 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
2914 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
2915 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2916 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
2917 #endif
2918 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
2919 {
2920 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
2921 mode))
2922 return gen_rtx (REG, mode,
2923 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2924 else
2925 return gen_rtx (CLOBBER, mode, const0_rtx);
2926 }
2927
2928 /* For a constant, try to pick up the part we want. Handle a full
2929 word and low-order part. Only do this if we are narrowing
2930 the constant; if it is being widened, we have no idea what
2931 the extra bits will have been set to. */
2932
2933 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
2934 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
2935 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
2936 && GET_MODE_CLASS (mode) == MODE_INT)
2937 {
2938 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
2939 0, op0_mode);
2940 if (temp)
2941 return temp;
2942 }
2943
2944 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
2945 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
2946 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
2947
2948 /* If we are narrowing the object, we need to see if we can simplify
2949 the expression for the object knowing that we only need the
2950 low-order bits. */
2951
2952 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2953 && subreg_lowpart_p (x))
2954 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
2955 NULL_RTX);
2956 break;
2957
2958 case NOT:
2959 /* (not (plus X -1)) can become (neg X). */
2960 if (GET_CODE (XEXP (x, 0)) == PLUS
2961 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
2962 {
2963 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
2964 goto restart;
2965 }
2966
2967 /* Similarly, (not (neg X)) is (plus X -1). */
2968 if (GET_CODE (XEXP (x, 0)) == NEG)
2969 {
2970 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
2971 goto restart;
2972 }
2973
2974 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
2975 if (GET_CODE (XEXP (x, 0)) == XOR
2976 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2977 && (temp = simplify_unary_operation (NOT, mode,
2978 XEXP (XEXP (x, 0), 1),
2979 mode)) != 0)
2980 {
2981 SUBST (XEXP (XEXP (x, 0), 1), temp);
2982 return XEXP (x, 0);
2983 }
2984
2985 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
2986 other than 1, but that is not valid. We could do a similar
2987 simplification for (not (lshiftrt C X)) where C is just the sign bit,
2988 but this doesn't seem common enough to bother with. */
2989 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2990 && XEXP (XEXP (x, 0), 0) == const1_rtx)
2991 {
2992 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
2993 XEXP (XEXP (x, 0), 1));
2994 goto restart;
2995 }
2996
2997 if (GET_CODE (XEXP (x, 0)) == SUBREG
2998 && subreg_lowpart_p (XEXP (x, 0))
2999 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3000 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3001 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3002 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3003 {
3004 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3005
3006 x = gen_rtx (ROTATE, inner_mode,
3007 gen_unary (NOT, inner_mode, const1_rtx),
3008 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3009 x = gen_lowpart_for_combine (mode, x);
3010 goto restart;
3011 }
3012
3013 #if STORE_FLAG_VALUE == -1
3014 /* (not (comparison foo bar)) can be done by reversing the comparison
3015 code if valid. */
3016 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3017 && reversible_comparison_p (XEXP (x, 0)))
3018 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3019 mode, XEXP (XEXP (x, 0), 0),
3020 XEXP (XEXP (x, 0), 1));
3021 #endif
3022
3023 /* Apply De Morgan's laws to reduce number of patterns for machines
3024 with negating logical insns (and-not, nand, etc.). If result has
3025 only one NOT, put it first, since that is how the patterns are
3026 coded. */
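/* E.g., (not (ior A B)) becomes (and (not A) (not B)); an operand that
is already a NOT is stripped, and a constant operand is complemented
on the spot. */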
3027
3028 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3029 {
3030 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3031
3032 if (GET_CODE (in1) == NOT)
3033 in1 = XEXP (in1, 0);
3034 else
3035 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3036
3037 if (GET_CODE (in2) == NOT)
3038 in2 = XEXP (in2, 0);
3039 else if (GET_CODE (in2) == CONST_INT
3040 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3041 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3042 else
3043 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3044
3045 if (GET_CODE (in2) == NOT)
3046 {
3047 rtx tem = in2;
3048 in2 = in1; in1 = tem;
3049 }
3050
3051 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3052 mode, in1, in2);
3053 goto restart;
3054 }
3055 break;
3056
3057 case NEG:
3058 /* (neg (plus X 1)) can become (not X). */
3059 if (GET_CODE (XEXP (x, 0)) == PLUS
3060 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3061 {
3062 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3063 goto restart;
3064 }
3065
3066 /* Similarly, (neg (not X)) is (plus X 1). */
3067 if (GET_CODE (XEXP (x, 0)) == NOT)
3068 {
3069 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
3070 goto restart;
3071 }
3072
3073 /* (neg (minus X Y)) can become (minus Y X). */
3074 if (GET_CODE (XEXP (x, 0)) == MINUS
3075 && (GET_MODE_CLASS (mode) != MODE_FLOAT
3076 /* x-y != -(y-x) with IEEE floating point. */
3077 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
3078 {
3079 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3080 XEXP (XEXP (x, 0), 0));
3081 goto restart;
3082 }
3083
3084 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3085 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3086 && significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3087 {
3088 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3089 goto restart;
3090 }
3091
3092 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3093 if we can then eliminate the NEG (e.g.,
3094 if the operand is a constant). */
3095
3096 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3097 {
3098 temp = simplify_unary_operation (NEG, mode,
3099 XEXP (XEXP (x, 0), 0), mode);
3100 if (temp)
3101 {
3102 SUBST (XEXP (XEXP (x, 0), 0), temp);
3103 return XEXP (x, 0);
3104 }
3105 }
3106
3107 temp = expand_compound_operation (XEXP (x, 0));
3108
3109 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3110 replaced by (lshiftrt X C). This will convert
3111 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3112
3113 if (GET_CODE (temp) == ASHIFTRT
3114 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3115 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3116 {
3117 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3118 INTVAL (XEXP (temp, 1)));
3119 goto restart;
3120 }
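/* Editor's note (illustrative): in a 32-bit mode, (ashiftrt X 31) is
   either 0 or -1, so its negation is 0 or 1, which is exactly
   (lshiftrt X 31). */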
3121
3122 /* If X has only a single bit significant, say, bit I, convert
3123 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3124 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3125 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3126 or a SUBREG of one since we'd be making the expression more
3127 complex if it was just a register. */
3128
3129 if (GET_CODE (temp) != REG
3130 && ! (GET_CODE (temp) == SUBREG
3131 && GET_CODE (SUBREG_REG (temp)) == REG)
3132 && (i = exact_log2 (significant_bits (temp, mode))) >= 0)
3133 {
3134 rtx temp1 = simplify_shift_const
3135 (NULL_RTX, ASHIFTRT, mode,
3136 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3137 GET_MODE_BITSIZE (mode) - 1 - i),
3138 GET_MODE_BITSIZE (mode) - 1 - i);
3139
3140 /* If all we did was surround TEMP with the two shifts, we
3141 haven't improved anything, so don't use it. Otherwise,
3142 we are better off with TEMP1. */
3143 if (GET_CODE (temp1) != ASHIFTRT
3144 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3145 || XEXP (XEXP (temp1, 0), 0) != temp)
3146 {
3147 x = temp1;
3148 goto restart;
3149 }
3150 }
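/* Editor's note (illustrative): if only bit 0 of TEMP can be nonzero
   in a 32-bit mode, (neg TEMP) becomes (ashiftrt (ashift TEMP 31) 31),
   which maps 1 to -1 and leaves 0 unchanged. */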
3151 break;
3152
3153 case FLOAT_TRUNCATE:
3154 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3155 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3156 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3157 return XEXP (XEXP (x, 0), 0);
3158 break;
3159
3160 #ifdef HAVE_cc0
3161 case COMPARE:
3162 /* Convert (compare FOO (const_int 0)) to FOO. If we were not
3163 using cc0, we would want to leave it as a COMPARE so we could
3164 distinguish it from a register-register copy. */
3165 if (XEXP (x, 1) == const0_rtx)
3166 return XEXP (x, 0);
3167
3168 /* In IEEE floating point, x-0 is not the same as x. */
3169 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3170 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3171 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3172 return XEXP (x, 0);
3173 break;
3174 #endif
3175
3176 case CONST:
3177 /* (const (const X)) can become (const X). Do it this way rather than
3178 returning the inner CONST since CONST can be shared with a
3179 REG_EQUAL note. */
3180 if (GET_CODE (XEXP (x, 0)) == CONST)
3181 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3182 break;
3183
3184 #ifdef HAVE_lo_sum
3185 case LO_SUM:
3186 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3187 can add in an offset. find_split_point will split this address up
3188 again if it doesn't match. */
3189 if (GET_CODE (XEXP (x, 0)) == HIGH
3190 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3191 return XEXP (x, 1);
3192 break;
3193 #endif
3194
3195 case PLUS:
3196 /* If we have (plus (plus A const) B), associate it so that CONST is
3197 outermost. That's because that's the way indexed addresses are
3198 supposed to appear. This code used to check many more cases, but
3199 they are now checked elsewhere. */
3200 if (GET_CODE (XEXP (x, 0)) == PLUS
3201 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3202 return gen_binary (PLUS, mode,
3203 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3204 XEXP (x, 1)),
3205 XEXP (XEXP (x, 0), 1));
3206
3207 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3208 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3209 bit-field and can be replaced by either a sign_extend or a
3210 sign_extract. The `and' may be a zero_extend. */
3211 if (GET_CODE (XEXP (x, 0)) == XOR
3212 && GET_CODE (XEXP (x, 1)) == CONST_INT
3213 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3214 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3215 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3216 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3217 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3218 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3219 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3220 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3221 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3222 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3223 == i + 1))))
3224 {
3225 x = simplify_shift_const
3226 (NULL_RTX, ASHIFTRT, mode,
3227 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3228 XEXP (XEXP (XEXP (x, 0), 0), 0),
3229 GET_MODE_BITSIZE (mode) - (i + 1)),
3230 GET_MODE_BITSIZE (mode) - (i + 1));
3231 goto restart;
3232 }
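/* Editor's note (illustrative, not in the original source): with
   i == 7 in a 32-bit mode, (plus (xor (and X 255) 128) -128)
   sign-extends the low byte of X and is rewritten as
   (ashiftrt (ashift X 24) 24). */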
3233
3234 /* If only the low-order bit of X is significant, (plus x -1)
3235 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3236 the bitsize of the mode - 1. This allows simplification of
3237 "a = (b & 8) == 0;" */
3238 if (XEXP (x, 1) == constm1_rtx
3239 && GET_CODE (XEXP (x, 0)) != REG
3240 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3241 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3242 && significant_bits (XEXP (x, 0), mode) == 1)
3243 {
3244 x = simplify_shift_const
3245 (NULL_RTX, ASHIFTRT, mode,
3246 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3247 gen_rtx_combine (XOR, mode,
3248 XEXP (x, 0), const1_rtx),
3249 GET_MODE_BITSIZE (mode) - 1),
3250 GET_MODE_BITSIZE (mode) - 1);
3251 goto restart;
3252 }
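/* Editor's note (illustrative): with X known to be 0 or 1 in a 32-bit
   mode, (plus X -1) becomes (ashiftrt (ashift (xor X 1) 31) 31),
   which maps 0 to -1 and 1 to 0, matching X - 1. */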
3253
3254 /* If we are adding two things that have no bits in common, convert
3255 the addition into an IOR. This will often be further simplified,
3256 for example in cases like ((a & 1) + (a & 2)), which can
3257 become a & 3. */
3258
3259 if ((significant_bits (XEXP (x, 0), mode)
3260 & significant_bits (XEXP (x, 1), mode)) == 0)
3261 {
3262 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3263 goto restart;
3264 }
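/* Editor's note (illustrative): (plus (and A 1) (and A 2)) has no
   overlapping significant bits, so it becomes
   (ior (and A 1) (and A 2)), which can then simplify to (and A 3). */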
3265 break;
3266
3267 case MINUS:
3268 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3269 (and <foo> (const_int pow2-1)) */
3270 if (GET_CODE (XEXP (x, 1)) == AND
3271 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3272 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3273 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3274 {
3275 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3276 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3277 goto restart;
3278 }
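/* Editor's note (illustrative): with pow2 == 8, (minus X (and X -8))
   computes X modulo 8, since -8 is ~7, and so becomes (and X 7). */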
3279 break;
3280
3281 case MULT:
3282 /* If we have (mult (plus A B) C), apply the distributive law and then
3283 the inverse distributive law to see if things simplify. This
3284 occurs mostly in addresses, often when unrolling loops. */
3285
3286 if (GET_CODE (XEXP (x, 0)) == PLUS)
3287 {
3288 x = apply_distributive_law
3289 (gen_binary (PLUS, mode,
3290 gen_binary (MULT, mode,
3291 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3292 gen_binary (MULT, mode,
3293 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3294
3295 if (GET_CODE (x) != MULT)
3296 goto restart;
3297 }
3298
3299 /* If this is multiplication by a power of two and its first operand is
3300 a shift, treat the multiply as a shift to allow the shifts to
3301 possibly combine. */
3302 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3303 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3304 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3305 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3306 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3307 || GET_CODE (XEXP (x, 0)) == ROTATE
3308 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3309 {
3310 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3311 goto restart;
3312 }
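/* Editor's note (illustrative): (mult (lshiftrt X 2) 4) is rewritten
   as (ashift (lshiftrt X 2) 2) so that simplify_shift_const can merge
   the two shifts, here into (and X -4). */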
3313
3314 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3315 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3316 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3317 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3318 XEXP (XEXP (x, 0), 1));
3319 break;
3320
3321 case UDIV:
3322 /* If this is a divide by a power of two, treat it as a shift if
3323 its first operand is a shift. */
3324 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3325 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3326 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3327 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3328 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3329 || GET_CODE (XEXP (x, 0)) == ROTATE
3330 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3331 {
3332 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3333 goto restart;
3334 }
3335 break;
3336
3337 case EQ: case NE:
3338 case GT: case GTU: case GE: case GEU:
3339 case LT: case LTU: case LE: case LEU:
3340 /* If the first operand is a condition code, we can't do anything
3341 with it. */
3342 if (GET_CODE (XEXP (x, 0)) == COMPARE
3343 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3344 #ifdef HAVE_cc0
3345 && XEXP (x, 0) != cc0_rtx
3346 #endif
3347 ))
3348 {
3349 rtx op0 = XEXP (x, 0);
3350 rtx op1 = XEXP (x, 1);
3351 enum rtx_code new_code;
3352
3353 if (GET_CODE (op0) == COMPARE)
3354 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3355
3356 /* Simplify our comparison, if possible. */
3357 new_code = simplify_comparison (code, &op0, &op1);
3358
3359 #if STORE_FLAG_VALUE == 1
3360 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3361 if only the low-order bit is significant in X (such as when
3362 X is a ZERO_EXTRACT of one bit). Similarly, we can convert
3363 EQ to (xor X 1). */
3364 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3365 && op1 == const0_rtx
3366 && significant_bits (op0, GET_MODE (op0)) == 1)
3367 return gen_lowpart_for_combine (mode, op0);
3368 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3369 && op1 == const0_rtx
3370 && significant_bits (op0, GET_MODE (op0)) == 1)
3371 return gen_rtx_combine (XOR, mode,
3372 gen_lowpart_for_combine (mode, op0),
3373 const1_rtx);
3374 #endif
3375
3376 #if STORE_FLAG_VALUE == -1
3377 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3378 to (neg x) if only the low-order bit of X is significant.
3379 This converts (ne (zero_extract X 1 Y) 0) to
3380 (sign_extract X 1 Y). */
3381 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3382 && op1 == const0_rtx
3383 && significant_bits (op0, GET_MODE (op0)) == 1)
3384 {
3385 x = gen_rtx_combine (NEG, mode,
3386 gen_lowpart_for_combine (mode, op0));
3387 goto restart;
3388 }
3389 #endif
3390
3391 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3392 one significant bit, we can convert (ne x 0) to (ashift x c)
3393 where C puts the bit in the sign bit. Remove any AND with
3394 STORE_FLAG_VALUE when we are done, since we are only going to
3395 test the sign bit. */
3396 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3397 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3398 && (STORE_FLAG_VALUE
3399 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3400 && op1 == const0_rtx
3401 && mode == GET_MODE (op0)
3402 && (i = exact_log2 (significant_bits (op0, GET_MODE (op0)))) >= 0)
3403 {
3404 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, op0,
3405 GET_MODE_BITSIZE (mode) - 1 - i);
3406 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3407 return XEXP (x, 0);
3408 else
3409 return x;
3410 }
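/* Editor's note (illustrative): if only bit I of X can be nonzero and
   STORE_FLAG_VALUE is the sign bit, the shift above moves bit I into
   the sign position; any remaining AND with STORE_FLAG_VALUE is
   redundant because only the sign bit will be tested. */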
3411
3412 /* If the code changed, return a whole new comparison. */
3413 if (new_code != code)
3414 return gen_rtx_combine (new_code, mode, op0, op1);
3415
3416 /* Otherwise, keep this operation, but maybe change its operands.
3417 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3418 SUBST (XEXP (x, 0), op0);
3419 SUBST (XEXP (x, 1), op1);
3420 }
3421 break;
3422
3423 case IF_THEN_ELSE:
3424 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3425 used in it is being compared against certain values. Get the
3426 true and false comparisons and see if that says anything about the
3427 value of each arm. */
3428
3429 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3430 && reversible_comparison_p (XEXP (x, 0))
3431 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3432 {
3433 HOST_WIDE_INT sig;
3434 rtx from = XEXP (XEXP (x, 0), 0);
3435 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3436 enum rtx_code false_code = reverse_condition (true_code);
3437 rtx true_val = XEXP (XEXP (x, 0), 1);
3438 rtx false_val = true_val;
3439 rtx true_arm = XEXP (x, 1);
3440 rtx false_arm = XEXP (x, 2);
3441 int swapped = 0;
3442
3443 /* If FALSE_CODE is EQ, swap the codes and arms. */
3444
3445 if (false_code == EQ)
3446 {
3447 swapped = 1, true_code = EQ, false_code = NE;
3448 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3449 }
3450
3451 /* If we are comparing against zero and the expression being tested
3452 has only a single significant bit, that is its value when it is
3453 not equal to zero. Similarly if it is known to be -1 or 0. */
3454
3455 if (true_code == EQ && true_val == const0_rtx
3456 && exact_log2 (sig = significant_bits (from,
3457 GET_MODE (from))) >= 0)
3458 false_code = EQ, false_val = GEN_INT (sig);
3459 else if (true_code == EQ && true_val == const0_rtx
3460 && (num_sign_bit_copies (from, GET_MODE (from))
3461 == GET_MODE_BITSIZE (GET_MODE (from))))
3462 false_code = EQ, false_val = constm1_rtx;
3463
3464 /* Now simplify an arm if we know the value of the register
3465 in the branch and it is used in the arm. Be careful due to
3466 the potential of locally-shared RTL. */
3467
3468 if (reg_mentioned_p (from, true_arm))
3469 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3470 from, true_val),
3471 pc_rtx, pc_rtx, 0, 0);
3472 if (reg_mentioned_p (from, false_arm))
3473 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3474 from, false_val),
3475 pc_rtx, pc_rtx, 0, 0);
3476
3477 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3478 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
3479 }
3480
3481 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3482 reversed, do so to avoid needing two sets of patterns for
3483 subtract-and-branch insns. Similarly if we have a constant in that
3484 position or if the third operand is the same as the first operand
3485 of the comparison. */
3486
3487 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3488 && reversible_comparison_p (XEXP (x, 0))
3489 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3490 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
3491 {
3492 SUBST (XEXP (x, 0),
3493 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3494 GET_MODE (XEXP (x, 0)),
3495 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3496
3497 temp = XEXP (x, 1);
3498 SUBST (XEXP (x, 1), XEXP (x, 2));
3499 SUBST (XEXP (x, 2), temp);
3500 }
3501
3502 /* If the two arms are identical, we don't need the comparison. */
3503
3504 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3505 && ! side_effects_p (XEXP (x, 0)))
3506 return XEXP (x, 1);
3507
3508 /* Look for cases where we have (abs x) or (neg (abs X)). */
3509
3510 if (GET_MODE_CLASS (mode) == MODE_INT
3511 && GET_CODE (XEXP (x, 2)) == NEG
3512 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3513 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3514 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3515 && ! side_effects_p (XEXP (x, 1)))
3516 switch (GET_CODE (XEXP (x, 0)))
3517 {
3518 case GT:
3519 case GE:
3520 x = gen_unary (ABS, mode, XEXP (x, 1));
3521 goto restart;
3522 case LT:
3523 case LE:
3524 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3525 goto restart;
3526 }
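/* Editor's note (illustrative, not in the original source): this turns
   (if_then_else (ge X 0) X (neg X)) into (abs X), and
   (if_then_else (lt X 0) X (neg X)) into (neg (abs X)). */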
3527
3528 /* Look for MIN or MAX. */
3529
3530 if (GET_MODE_CLASS (mode) == MODE_INT
3531 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3532 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3533 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3534 && ! side_effects_p (XEXP (x, 0)))
3535 switch (GET_CODE (XEXP (x, 0)))
3536 {
3537 case GE:
3538 case GT:
3539 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3540 goto restart;
3541 case LE:
3542 case LT:
3543 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3544 goto restart;
3545 case GEU:
3546 case GTU:
3547 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3548 goto restart;
3549 case LEU:
3550 case LTU:
3551 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3552 goto restart;
3553 }
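/* Editor's note (illustrative): for example,
   (if_then_else (gt A B) A B) becomes (smax A B) and
   (if_then_else (ltu A B) A B) becomes (umin A B). */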
3554
3555 /* If we have something like (if_then_else (ne A 0) (OP X C) X),
3556 A is known to be either 0 or 1, and OP is an identity when its
3557 second operand is zero, this can be done as (OP X (mult A C)).
3558 Similarly if A is known to be 0 or -1 and also similarly if we have
3559 a ZERO_EXTEND or SIGN_EXTEND as long as X is already extended (so
3560 we don't destroy it). */
3561
3562 if (mode != VOIDmode
3563 && (GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3564 && XEXP (XEXP (x, 0), 1) == const0_rtx
3565 && (significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3566 || (num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
3567 == GET_MODE_BITSIZE (mode))))
3568 {
3569 rtx nz = make_compound_operation (GET_CODE (XEXP (x, 0)) == NE
3570 ? XEXP (x, 1) : XEXP (x, 2));
3571 rtx z = GET_CODE (XEXP (x, 0)) == NE ? XEXP (x, 2) : XEXP (x, 1);
3572 rtx dir = (significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3573 ? const1_rtx : constm1_rtx);
3574 rtx c = 0;
3575 enum machine_mode m = mode;
3576 enum rtx_code op, extend_op = 0;
3577
3578 if ((GET_CODE (nz) == PLUS || GET_CODE (nz) == MINUS
3579 || GET_CODE (nz) == IOR || GET_CODE (nz) == XOR
3580 || GET_CODE (nz) == ASHIFT
3581 || GET_CODE (nz) == LSHIFTRT || GET_CODE (nz) == ASHIFTRT)
3582 && rtx_equal_p (XEXP (nz, 0), z))
3583 c = XEXP (nz, 1), op = GET_CODE (nz);
3584 else if (GET_CODE (nz) == SIGN_EXTEND
3585 && (GET_CODE (XEXP (nz, 0)) == PLUS
3586 || GET_CODE (XEXP (nz, 0)) == MINUS
3587 || GET_CODE (XEXP (nz, 0)) == IOR
3588 || GET_CODE (XEXP (nz, 0)) == XOR
3589 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3590 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3591 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3592 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3593 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3594 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3595 && (num_sign_bit_copies (z, GET_MODE (z))
3596 >= (GET_MODE_BITSIZE (mode)
3597 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (nz, 0), 0))))))
3598 {
3599 c = XEXP (XEXP (nz, 0), 1);
3600 op = GET_CODE (XEXP (nz, 0));
3601 extend_op = SIGN_EXTEND;
3602 m = GET_MODE (XEXP (nz, 0));
3603 }
3604 else if (GET_CODE (nz) == ZERO_EXTEND
3605 && (GET_CODE (XEXP (nz, 0)) == PLUS
3606 || GET_CODE (XEXP (nz, 0)) == MINUS
3607 || GET_CODE (XEXP (nz, 0)) == IOR
3608 || GET_CODE (XEXP (nz, 0)) == XOR
3609 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3610 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3611 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3612 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3613 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3614 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3615 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3616 && ((significant_bits (z, GET_MODE (z))
3617 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (nz, 0), 0))))
3618 == 0))
3619 {
3620 c = XEXP (XEXP (nz, 0), 1);
3621 op = GET_CODE (XEXP (nz, 0));
3622 extend_op = ZERO_EXTEND;
3623 m = GET_MODE (XEXP (nz, 0));
3624 }
3625
3626 if (c && ! side_effects_p (c) && ! side_effects_p (z))
3627 {
3628 temp
3629 = gen_binary (MULT, m,
3630 gen_lowpart_for_combine (m,
3631 XEXP (XEXP (x, 0), 0)),
3632 gen_binary (MULT, m, c, dir));
3633
3634 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
3635
3636 if (extend_op != 0)
3637 temp = gen_unary (extend_op, mode, temp);
3638
3639 return temp;
3640 }
3641 }
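/* Editor's note (illustrative sketch): with A known to be 0 or 1,
   (if_then_else (ne A 0) (plus X C) X) becomes (plus X (mult A C)),
   since (mult A C) is C when A is 1 and 0 when A is 0. */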
3642 break;
3643
3644 case ZERO_EXTRACT:
3645 case SIGN_EXTRACT:
3646 case ZERO_EXTEND:
3647 case SIGN_EXTEND:
3648 /* If we are processing SET_DEST, we are done. */
3649 if (in_dest)
3650 return x;
3651
3652 x = expand_compound_operation (x);
3653 if (GET_CODE (x) != code)
3654 goto restart;
3655 break;
3656
3657 case SET:
3658 /* (set (pc) (return)) gets written as (return). */
3659 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3660 return SET_SRC (x);
3661
3662 /* Convert this into a field assignment operation, if possible. */
3663 x = make_field_assignment (x);
3664
3665 /* If we are setting CC0 or if the source is a COMPARE, look for the
3666 use of the comparison result and try to simplify it unless we already
3667 have used undobuf.other_insn. */
3668 if ((GET_CODE (SET_SRC (x)) == COMPARE
3669 #ifdef HAVE_cc0
3670 || SET_DEST (x) == cc0_rtx
3671 #endif
3672 )
3673 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3674 &other_insn)) != 0
3675 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3676 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3677 && XEXP (*cc_use, 0) == SET_DEST (x))
3678 {
3679 enum rtx_code old_code = GET_CODE (*cc_use);
3680 enum rtx_code new_code;
3681 rtx op0, op1;
3682 int other_changed = 0;
3683 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3684
3685 if (GET_CODE (SET_SRC (x)) == COMPARE)
3686 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3687 else
3688 op0 = SET_SRC (x), op1 = const0_rtx;
3689
3690 /* Simplify our comparison, if possible. */
3691 new_code = simplify_comparison (old_code, &op0, &op1);
3692
3693 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3694 /* If this machine has CC modes other than CCmode, check to see
3695 if we need to use a different CC mode here. */
3696 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3697
3698 /* If the mode changed, we have to change SET_DEST, the mode
3699 in the compare, and the mode in the place SET_DEST is used.
3700 If SET_DEST is a hard register, just build new versions with
3701 the proper mode. If it is a pseudo, we lose unless it is the only
3702 time we set the pseudo, in which case we can safely change
3703 its mode. */
3704 if (compare_mode != GET_MODE (SET_DEST (x)))
3705 {
3706 int regno = REGNO (SET_DEST (x));
3707 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3708
3709 if (regno < FIRST_PSEUDO_REGISTER
3710 || (reg_n_sets[regno] == 1
3711 && ! REG_USERVAR_P (SET_DEST (x))))
3712 {
3713 if (regno >= FIRST_PSEUDO_REGISTER)
3714 SUBST (regno_reg_rtx[regno], new_dest);
3715
3716 SUBST (SET_DEST (x), new_dest);
3717 SUBST (XEXP (*cc_use, 0), new_dest);
3718 other_changed = 1;
3719 }
3720 }
3721 #endif
3722
3723 /* If the code changed, we have to build a new comparison
3724 in undobuf.other_insn. */
3725 if (new_code != old_code)
3726 {
3727 unsigned mask;
3728
3729 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3730 SET_DEST (x), const0_rtx));
3731
3732 /* If the only change we made was to change an EQ into an
3733 NE or vice versa, OP0 has only one significant bit,
3734 and OP1 is zero, check if changing the user of the condition
3735 code will produce a valid insn. If it won't, we can keep
3736 the original code in that insn by surrounding our operation
3737 with an XOR. */
3738
3739 if (((old_code == NE && new_code == EQ)
3740 || (old_code == EQ && new_code == NE))
3741 && ! other_changed && op1 == const0_rtx
3742 && (GET_MODE_BITSIZE (GET_MODE (op0))
3743 <= HOST_BITS_PER_WIDE_INT)
3744 && (exact_log2 (mask = significant_bits (op0,
3745 GET_MODE (op0)))
3746 >= 0))
3747 {
3748 rtx pat = PATTERN (other_insn), note = 0;
3749
3750 if ((recog_for_combine (&pat, undobuf.other_insn, &note) < 0
3751 && ! check_asm_operands (pat)))
3752 {
3753 PUT_CODE (*cc_use, old_code);
3754 other_insn = 0;
3755
3756 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3757 GEN_INT (mask));
3758 }
3759 }
3760
3761 other_changed = 1;
3762 }
3763
3764 if (other_changed)
3765 undobuf.other_insn = other_insn;
3766
3767 #ifdef HAVE_cc0
3768 /* If we are now comparing against zero, change our source if
3769 needed. If we do not use cc0, we always have a COMPARE. */
3770 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3771 SUBST (SET_SRC (x), op0);
3772 else
3773 #endif
3774
3775 /* Otherwise, if we didn't previously have a COMPARE in the
3776 correct mode, we need one. */
3777 if (GET_CODE (SET_SRC (x)) != COMPARE
3778 || GET_MODE (SET_SRC (x)) != compare_mode)
3779 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3780 op0, op1));
3781 else
3782 {
3783 /* Otherwise, update the COMPARE if needed. */
3784 SUBST (XEXP (SET_SRC (x), 0), op0);
3785 SUBST (XEXP (SET_SRC (x), 1), op1);
3786 }
3787 }
3788 else
3789 {
3790 /* Get SET_SRC in a form where we have placed back any
3791 compound expressions. Then do the checks below. */
3792 temp = make_compound_operation (SET_SRC (x), SET);
3793 SUBST (SET_SRC (x), temp);
3794 }
3795
3796 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3797 operation, and X being a REG or (subreg (reg)), we may be able to
3798 convert this to (set (subreg:m2 x) (op)).
3799
3800 We can always do this if M1 is narrower than M2 because that
3801 means that we only care about the low bits of the result.
3802
3803 However, on most machines (those with BYTE_LOADS_ZERO_EXTEND
3804 and BYTE_LOADS_SIGN_EXTEND not defined), we cannot perform a
3805 narrower operation than requested since the high-order bits will
3806 be undefined. On machines where BYTE_LOADS_*_EXTEND is defined,
3807 however, this transformation is safe as long as M1 and M2 have
3808 the same number of words. */
3809
3810 if (GET_CODE (SET_SRC (x)) == SUBREG
3811 && subreg_lowpart_p (SET_SRC (x))
3812 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3813 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3814 / UNITS_PER_WORD)
3815 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3816 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3817 #if ! defined(BYTE_LOADS_ZERO_EXTEND) && ! defined (BYTE_LOADS_SIGN_EXTEND)
3818 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3819 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3820 #endif
3821 && (GET_CODE (SET_DEST (x)) == REG
3822 || (GET_CODE (SET_DEST (x)) == SUBREG
3823 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3824 {
3825 SUBST (SET_DEST (x),
3826 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
3827 SET_DEST (x)));
3828 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3829 }
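/* Editor's note (illustrative sketch): e.g.
   (set Y:QI (subreg:QI (plus:SI A B) 0)) can become
   (set (subreg:SI Y) (plus:SI A B)) when only the low byte of the
   result matters. */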
3830
3831 #ifdef BYTE_LOADS_ZERO_EXTEND
3832 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3833 M wider than N, this would require a paradoxical subreg.
3834 Replace the subreg with a zero_extend to avoid the reload that
3835 would otherwise be required. */
3836 if (GET_CODE (SET_SRC (x)) == SUBREG
3837 && subreg_lowpart_p (SET_SRC (x))
3838 && SUBREG_WORD (SET_SRC (x)) == 0
3839 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3840 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3841 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
3842 SUBST (SET_SRC (x), gen_rtx_combine (ZERO_EXTEND,
3843 GET_MODE (SET_SRC (x)),
3844 XEXP (SET_SRC (x), 0)));
3845 #endif
3846
3847 #ifndef HAVE_conditional_move
3848
3849 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
3850 and we are comparing an item known to be 0 or -1 against 0, use a
3851 logical operation instead. Check for one of the arms being an IOR
3852 of the other arm with some value. We compute three terms to be
3853 IOR'ed together. In practice, at most two will be nonzero. Then
3854 we do the IOR's. */
3855
3856 if (GET_CODE (SET_DEST (x)) != PC
3857 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
3858 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
3859 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
3860 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
3861 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
3862 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
3863 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
3864 && ! side_effects_p (SET_SRC (x)))
3865 {
3866 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3867 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
3868 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3869 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
3870 rtx term1 = const0_rtx, term2, term3;
3871
3872 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
3873 term1 = false, true = XEXP (true, 1), false = const0_rtx;
3874 else if (GET_CODE (true) == IOR
3875 && rtx_equal_p (XEXP (true, 1), false))
3876 term1 = false, true = XEXP (true, 0), false = const0_rtx;
3877 else if (GET_CODE (false) == IOR
3878 && rtx_equal_p (XEXP (false, 0), true))
3879 term1 = true, false = XEXP (false, 1), true = const0_rtx;
3880 else if (GET_CODE (false) == IOR
3881 && rtx_equal_p (XEXP (false, 1), true))
3882 term1 = true, false = XEXP (false, 0), true = const0_rtx;
3883
3884 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3885 XEXP (XEXP (SET_SRC (x), 0), 0), true);
3886 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3887 gen_unary (NOT, GET_MODE (SET_SRC (x)),
3888 XEXP (XEXP (SET_SRC (x), 0), 0)),
3889 false);
3890
3891 SUBST (SET_SRC (x),
3892 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3893 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3894 term1, term2),
3895 term3));
3896 }
3897 #endif
3898 break;
3899
3900 case AND:
3901 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3902 {
3903 x = simplify_and_const_int (x, mode, XEXP (x, 0),
3904 INTVAL (XEXP (x, 1)));
3905
3906 /* If we have (ior (and X C1) C2) and the next restart would be
3907 the last, simplify this by making C1 as small as possible
3908 and then exit. */
3909 if (n_restarts >= 3 && GET_CODE (x) == IOR
3910 && GET_CODE (XEXP (x, 0)) == AND
3911 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3912 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3913 {
3914 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
3915 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
3916 & ~ INTVAL (XEXP (x, 1))));
3917 return gen_binary (IOR, mode, temp, XEXP (x, 1));
3918 }
3919
3920 if (GET_CODE (x) != AND)
3921 goto restart;
3922 }
3923
3924 /* Convert (A | B) & A to A. */
3925 if (GET_CODE (XEXP (x, 0)) == IOR
3926 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3927 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3928 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3929 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3930 return XEXP (x, 1);
3931
3932 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
3933 insn (and may simplify more). */
3934 else if (GET_CODE (XEXP (x, 0)) == XOR
3935 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3936 && ! side_effects_p (XEXP (x, 1)))
3937 {
3938 x = gen_binary (AND, mode,
3939 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3940 XEXP (x, 1));
3941 goto restart;
3942 }
3943 else if (GET_CODE (XEXP (x, 0)) == XOR
3944 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3945 && ! side_effects_p (XEXP (x, 1)))
3946 {
3947 x = gen_binary (AND, mode,
3948 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3949 XEXP (x, 1));
3950 goto restart;
3951 }
3952
3953 /* Similarly for (~ (A ^ B)) & A. */
3954 else if (GET_CODE (XEXP (x, 0)) == NOT
3955 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3956 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
3957 && ! side_effects_p (XEXP (x, 1)))
3958 {
3959 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
3960 XEXP (x, 1));
3961 goto restart;
3962 }
3963 else if (GET_CODE (XEXP (x, 0)) == NOT
3964 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3965 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
3966 && ! side_effects_p (XEXP (x, 1)))
3967 {
3968 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
3969 XEXP (x, 1));
3970 goto restart;
3971 }
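/* Editor's note (illustrative): the rewrites above use the identities
   (and (xor A B) A) == (and (not B) A) and
   (and (not (xor A B)) A) == (and B A); in both, a result bit can be
   set only where the corresponding bit of A is set. */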
3972
3973 /* If we have (and A B) with A not an object but that is known to
3974 be -1 or 0, this is equivalent to the expression
3975 (if_then_else (ne A (const_int 0)) B (const_int 0))
3976 We make this conversion because it may allow further
3977 simplifications and then allow use of conditional move insns.
3978 If the machine doesn't have condition moves, code in case SET
3979 will convert the IF_THEN_ELSE back to the logical operation.
3980 We build the IF_THEN_ELSE here in case further simplification
3981 is possible (e.g., we can convert it to ABS). */
3982
3983 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3984 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3985 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
3986 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3987 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3988 {
3989 rtx op0 = XEXP (x, 0);
3990 rtx op1 = const0_rtx;
3991 enum rtx_code comp_code
3992 = simplify_comparison (NE, &op0, &op1);
3993
3994 x = gen_rtx_combine (IF_THEN_ELSE, mode,
3995 gen_binary (comp_code, VOIDmode, op0, op1),
3996 XEXP (x, 1), const0_rtx);
3997 goto restart;
3998 }
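/* Editor's note (illustrative): if A is known to be 0 or -1, every
   bit of A is a copy of its sign, so (and A B) is B when A is -1 and
   0 when A is 0, i.e. (if_then_else (ne A 0) B (const_int 0)). */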
3999
4000 /* In the following group of tests (and those in case IOR below),
4001 we start with some combination of logical operations and apply
4002 the distributive law followed by the inverse distributive law.
4003 Most of the time, this results in no change. However, if some of
4004 the operands are the same or inverses of each other, simplifications
4005 will result.
4006
4007 For example, (and (ior A B) (not B)) can occur as the result of
4008 expanding a bit field assignment. When we apply the distributive
4009 law to this, we get (ior (and A (not B)) (and B (not B))),
4010 which then simplifies to (and A (not B)). */
4011
4012 /* If we have (and (ior A B) C), apply the distributive law and then
4013 the inverse distributive law to see if things simplify. */
4014
4015 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4016 {
4017 x = apply_distributive_law
4018 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4019 gen_binary (AND, mode,
4020 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4021 gen_binary (AND, mode,
4022 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4023 if (GET_CODE (x) != AND)
4024 goto restart;
4025 }
4026
4027 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4028 {
4029 x = apply_distributive_law
4030 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4031 gen_binary (AND, mode,
4032 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4033 gen_binary (AND, mode,
4034 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4035 if (GET_CODE (x) != AND)
4036 goto restart;
4037 }
4038
4039 /* Similarly, taking advantage of the fact that
4040 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4041
4042 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4043 {
4044 x = apply_distributive_law
4045 (gen_binary (XOR, mode,
4046 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4047 XEXP (XEXP (x, 1), 0)),
4048 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4049 XEXP (XEXP (x, 1), 1))));
4050 if (GET_CODE (x) != AND)
4051 goto restart;
4052 }
4053
4054 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4055 {
4056 x = apply_distributive_law
4057 (gen_binary (XOR, mode,
4058 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4059 XEXP (XEXP (x, 0), 0)),
4060 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4061 XEXP (XEXP (x, 0), 1))));
4062 if (GET_CODE (x) != AND)
4063 goto restart;
4064 }
4065 break;
4066
4067 case IOR:
4068 /* (ior A C) is C if all significant bits of A are on in C. */
4069 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4070 && (significant_bits (XEXP (x, 0), mode)
4071 & ~ INTVAL (XEXP (x, 1))) == 0)
4072 return XEXP (x, 1);
4073
4074 /* Convert (A & B) | A to A. */
4075 if (GET_CODE (XEXP (x, 0)) == AND
4076 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4077 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4078 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4079 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4080 return XEXP (x, 1);
4081
4082 /* If we have (ior (and A B) C), apply the distributive law and then
4083 the inverse distributive law to see if things simplify. */
4084
4085 if (GET_CODE (XEXP (x, 0)) == AND)
4086 {
4087 x = apply_distributive_law
4088 (gen_binary (AND, mode,
4089 gen_binary (IOR, mode,
4090 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4091 gen_binary (IOR, mode,
4092 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4093
4094 if (GET_CODE (x) != IOR)
4095 goto restart;
4096 }
4097
4098 if (GET_CODE (XEXP (x, 1)) == AND)
4099 {
4100 x = apply_distributive_law
4101 (gen_binary (AND, mode,
4102 gen_binary (IOR, mode,
4103 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4104 gen_binary (IOR, mode,
4105 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4106
4107 if (GET_CODE (x) != IOR)
4108 goto restart;
4109 }
4110
4111 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4112 mode size to (rotate A CX). */
4113
4114 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4115 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4116 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4117 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4118 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4119 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4120 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4121 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4122 == GET_MODE_BITSIZE (mode)))
4123 {
4124 rtx shift_count;
4125
4126 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4127 shift_count = XEXP (XEXP (x, 0), 1);
4128 else
4129 shift_count = XEXP (XEXP (x, 1), 1);
4130 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4131 goto restart;
4132 }
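/* Editor's note (illustrative): in a 32-bit mode,
   (ior (ashift A 8) (lshiftrt A 24)) is a left rotate and becomes
   (rotate A 8). */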
4133 break;
4134
4135 case XOR:
4136 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4137 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4138 (NOT y). */
4139 {
4140 int num_negated = 0;
4141 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4142
4143 if (GET_CODE (in1) == NOT)
4144 num_negated++, in1 = XEXP (in1, 0);
4145 if (GET_CODE (in2) == NOT)
4146 num_negated++, in2 = XEXP (in2, 0);
4147
4148 if (num_negated == 2)
4149 {
4150 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4151 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4152 }
4153 else if (num_negated == 1)
4154 {
4155 x = gen_unary (NOT, mode,
4156 gen_binary (XOR, mode, in1, in2));
4157 goto restart;
4158 }
4159 }
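/* Editor's note (illustrative): these rewrites rely on
   (xor (not A) (not B)) == (xor A B) and
   (xor (not A) B) == (not (xor A B)): complementing both XOR inputs
   leaves the result unchanged. */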
4160
4161 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4162 correspond to a machine insn or result in further simplifications
4163 if B is a constant. */
4164
4165 if (GET_CODE (XEXP (x, 0)) == AND
4166 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4167 && ! side_effects_p (XEXP (x, 1)))
4168 {
4169 x = gen_binary (AND, mode,
4170 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4171 XEXP (x, 1));
4172 goto restart;
4173 }
4174 else if (GET_CODE (XEXP (x, 0)) == AND
4175 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4176 && ! side_effects_p (XEXP (x, 1)))
4177 {
4178 x = gen_binary (AND, mode,
4179 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4180 XEXP (x, 1));
4181 goto restart;
4182 }
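/* Editor's note (illustrative): (xor (and A B) B) keeps exactly the
   bits of B that are clear in A: where B is 1 the result is the
   complement of A, and where B is 0 it is 0, hence (and (not A) B). */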
4183
4184
4185 #if STORE_FLAG_VALUE == 1
4186 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4187 comparison. */
4188 if (XEXP (x, 1) == const1_rtx
4189 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4190 && reversible_comparison_p (XEXP (x, 0)))
4191 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4192 mode, XEXP (XEXP (x, 0), 0),
4193 XEXP (XEXP (x, 0), 1));
4194 #endif
4195
4196 /* (xor (comparison foo bar) (const_int sign-bit))
4197 when STORE_FLAG_VALUE is the sign bit. */
4198 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4199 && (STORE_FLAG_VALUE
4200 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4201 && XEXP (x, 1) == const_true_rtx
4202 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4203 && reversible_comparison_p (XEXP (x, 0)))
4204 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4205 mode, XEXP (XEXP (x, 0), 0),
4206 XEXP (XEXP (x, 0), 1));
4207 break;
4208
4209 case ABS:
4210 /* (abs (neg <foo>)) -> (abs <foo>) */
4211 if (GET_CODE (XEXP (x, 0)) == NEG)
4212 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4213
4214 /* If operand is something known to be positive, ignore the ABS. */
4215 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4216 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4217 <= HOST_BITS_PER_WIDE_INT)
4218 && ((significant_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4219 & ((HOST_WIDE_INT) 1
4220 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4221 == 0)))
4222 return XEXP (x, 0);
4223
4224
4225 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4226 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4227 {
4228 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4229 goto restart;
4230 }
4231 break;
4232
4233 case FFS:
4234 /* (ffs (*_extend <X>)) = (ffs <X>) */
4235 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4236 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4237 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4238 break;
4239
4240 case FLOAT:
4241 /* (float (sign_extend <X>)) = (float <X>). */
4242 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4243 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4244 break;
4245
4246 case LSHIFT:
4247 case ASHIFT:
4248 case LSHIFTRT:
4249 case ASHIFTRT:
4250 case ROTATE:
4251 case ROTATERT:
4252 /* If this is a shift by a constant amount, simplify it. */
4253 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4254 {
4255 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4256 INTVAL (XEXP (x, 1)));
4257 if (GET_CODE (x) != code)
4258 goto restart;
4259 }
4260
4261 #ifdef SHIFT_COUNT_TRUNCATED
4262 else if (GET_CODE (XEXP (x, 1)) != REG)
4263 SUBST (XEXP (x, 1),
4264 force_to_mode (XEXP (x, 1), GET_MODE (x),
4265 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
4266 NULL_RTX));
4267 #endif
4268
4269 break;
4270 }
4271
4272 return x;
4273 }
4274 \f
4275 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4276 operations" because they can be replaced with two more basic operations.
4277 ZERO_EXTEND is also considered "compound" because it can be replaced with
4278 an AND operation, which is simpler, though only one operation.
4279
4280 The function expand_compound_operation is called with an rtx expression
4281 and will convert it to the appropriate shifts and AND operations,
4282 simplifying at each stage.
4283
4284 The function make_compound_operation is called to convert an expression
4285 consisting of shifts and ANDs into the equivalent compound expression.
4286 It is the inverse of this function, loosely speaking. */
4287
4288 static rtx
4289 expand_compound_operation (x)
4290 rtx x;
4291 {
4292 int pos = 0, len;
4293 int unsignedp = 0;
4294 int modewidth;
4295 rtx tem;
4296
4297 switch (GET_CODE (x))
4298 {
4299 case ZERO_EXTEND:
4300 unsignedp = 1;
4301 case SIGN_EXTEND:
4302 /* We can't necessarily use a const_int for a multiword mode;
4303 it depends on implicitly extending the value.
4304 Since we don't know the right way to extend it,
4305 we can't tell whether the implicit way is right.
4306
4307 Even for a mode that is no wider than a const_int,
4308 we can't win, because we need to sign extend one of its bits through
4309 the rest of it, and we don't know which bit. */
4310 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4311 return x;
4312
4313 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4314 return x;
4315
4316 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4317 /* If the inner object has VOIDmode (the only way this can happen
4318 is if it is an ASM_OPERANDS), we can't do anything since we don't
4319 know how much masking to do. */
4320 if (len == 0)
4321 return x;
4322
4323 break;
4324
4325 case ZERO_EXTRACT:
4326 unsignedp = 1;
4327 case SIGN_EXTRACT:
4328 /* If the operand is a CLOBBER, just return it. */
4329 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4330 return XEXP (x, 0);
4331
4332 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4333 || GET_CODE (XEXP (x, 2)) != CONST_INT
4334 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4335 return x;
4336
4337 len = INTVAL (XEXP (x, 1));
4338 pos = INTVAL (XEXP (x, 2));
4339
4340 /* If this goes outside the object being extracted, replace the object
4341 with a (use (mem ...)) construct that only combine understands
4342 and is used only for this purpose. */
4343 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4344 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4345
4346 #if BITS_BIG_ENDIAN
4347 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4348 #endif
4349 break;
4350
4351 default:
4352 return x;
4353 }
4354
4355 /* If we reach here, we want to return a pair of shifts. The inner
4356 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4357 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4358 logical depending on the value of UNSIGNEDP.
4359
4360 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4361 converted into an AND of a shift.
4362
4363 We must check for the case where the left shift would have a negative
4364 count. This can happen in a case like (x >> 31) & 255 on machines
4365 that can't shift by a constant. On those machines, we would first
4366 combine the shift with the AND to produce a variable-position
4367 extraction. Then the constant of 31 would be substituted in to produce
4368 such a position. */
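/* Editor's note (illustrative, not in the original source): e.g.
   (sign_extend:SI (reg:QI X)) expands to
   (ashiftrt:SI (ashift:SI X 24) 24), while the zero-extend form's
   shift pair simplifies to (and:SI X 255). */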
4369
4370 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4371 if (modewidth >= pos + len)
4372 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4373 GET_MODE (x),
4374 simplify_shift_const (NULL_RTX, ASHIFT,
4375 GET_MODE (x),
4376 XEXP (x, 0),
4377 modewidth - pos - len),
4378 modewidth - len);
4379
4380 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4381 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4382 simplify_shift_const (NULL_RTX, LSHIFTRT,
4383 GET_MODE (x),
4384 XEXP (x, 0), pos),
4385 ((HOST_WIDE_INT) 1 << len) - 1);
4386 else
4387 /* Any other cases we can't handle. */
4388 return x;
4389
4390
4391 /* If we couldn't do this for some reason, return the original
4392 expression. */
4393 if (GET_CODE (tem) == CLOBBER)
4394 return x;
4395
4396 return tem;
4397 }
4398 \f
4399 /* X is a SET which contains an assignment of one object into
4400 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4401 or certain SUBREGS). If possible, convert it into a series of
4402 logical operations.
4403
4404 We half-heartedly support variable positions, but do not at all
4405 support variable lengths. */
4406
4407 static rtx
4408 expand_field_assignment (x)
4409 rtx x;
4410 {
4411 rtx inner;
4412 rtx pos; /* Always counts from low bit. */
4413 int len;
4414 rtx mask;
4415 enum machine_mode compute_mode;
4416
4417 /* Loop until we find something we can't simplify. */
4418 while (1)
4419 {
4420 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4421 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4422 {
4423 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4424 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4425 pos = const0_rtx;
4426 }
4427 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4428 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4429 {
4430 inner = XEXP (SET_DEST (x), 0);
4431 len = INTVAL (XEXP (SET_DEST (x), 1));
4432 pos = XEXP (SET_DEST (x), 2);
4433
4434 /* If the position is constant and the field extends beyond the
4435 width of INNER, surround INNER with a USE to indicate this. */
4436 if (GET_CODE (pos) == CONST_INT
4437 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4438 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4439
4440 #if BITS_BIG_ENDIAN
4441 if (GET_CODE (pos) == CONST_INT)
4442 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4443 - INTVAL (pos));
4444 else if (GET_CODE (pos) == MINUS
4445 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4446 && (INTVAL (XEXP (pos, 1))
4447 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4448 /* If position is ADJUST - X, new position is X. */
4449 pos = XEXP (pos, 0);
4450 else
4451 pos = gen_binary (MINUS, GET_MODE (pos),
4452 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4453 - len),
4454 pos);
4455 #endif
4456 }
4457
4458 /* A SUBREG between two modes that occupy the same numbers of words
4459 can be done by moving the SUBREG to the source. */
4460 else if (GET_CODE (SET_DEST (x)) == SUBREG
4461 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4462 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4463 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4464 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4465 {
4466 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4467 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4468 SET_SRC (x)));
4469 continue;
4470 }
4471 else
4472 break;
4473
4474 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4475 inner = SUBREG_REG (inner);
4476
4477 compute_mode = GET_MODE (inner);
4478
4479 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4480 if (len < HOST_BITS_PER_WIDE_INT)
4481 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4482 else
4483 break;
4484
4485 /* Now compute the equivalent expression. Make a copy of INNER
4486 for the SET_DEST in case it is a MEM into which we will substitute;
4487 we don't want shared RTL in that case. */
4488 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4489 gen_binary (IOR, compute_mode,
4490 gen_binary (AND, compute_mode,
4491 gen_unary (NOT, compute_mode,
4492 gen_binary (ASHIFT,
4493 compute_mode,
4494 mask, pos)),
4495 inner),
4496 gen_binary (ASHIFT, compute_mode,
4497 gen_binary (AND, compute_mode,
4498 gen_lowpart_for_combine
4499 (compute_mode,
4500 SET_SRC (x)),
4501 mask),
4502 pos)));
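/* Editor's note (illustrative): the expression built above is the
   usual read-modify-write idiom, roughly
   inner = (inner & ~(MASK << POS)) | ((SRC & MASK) << POS) in C. */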
4503 }
4504
4505 return x;
4506 }
4507 \f
4508 /* Return an RTX for a reference to LEN bits of INNER. POS is the starting
4509 bit position (counted from the LSB) if >= 0; otherwise POS_RTX represents
4510 the starting bit position.
4511
4512 INNER may be a USE. This will occur when we started with a bitfield
4513 that went outside the boundary of the object in memory, which is
4514 allowed on most machines. To isolate this case, we produce a USE
4515 whose mode is wide enough and surround the MEM with it. The only
4516 code that understands the USE is this routine. If it is not removed,
4517 it will cause the resulting insn not to match.
4518
4519 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4520 signed reference.
4521
4522 IN_DEST is non-zero if this is a reference in the destination of a
4523 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4524 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4525 be used.
4526
4527 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4528 ZERO_EXTRACT should be built even for bits starting at bit 0.
4529
4530 MODE is the desired mode of the result (if IN_DEST == 0). */
4531
4532 static rtx
4533 make_extraction (mode, inner, pos, pos_rtx, len,
4534 unsignedp, in_dest, in_compare)
4535 enum machine_mode mode;
4536 rtx inner;
4537 int pos;
4538 rtx pos_rtx;
4539 int len;
4540 int unsignedp;
4541 int in_dest, in_compare;
4542 {
4543 /* This mode describes the size of the storage area
4544 to fetch the overall value from. Within that, we
4545 ignore the POS lowest bits, etc. */
4546 enum machine_mode is_mode = GET_MODE (inner);
4547 enum machine_mode inner_mode;
4548 enum machine_mode wanted_mem_mode = byte_mode;
4549 enum machine_mode pos_mode = word_mode;
4550 enum machine_mode extraction_mode = word_mode;
4551 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4552 int spans_byte = 0;
4553 rtx new = 0;
4554
4555 /* Get some information about INNER and get the innermost object. */
4556 if (GET_CODE (inner) == USE)
4557 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
4558 /* We don't need to adjust the position because we set up the USE
4559 to pretend that it was a full-word object. */
4560 spans_byte = 1, inner = XEXP (inner, 0);
4561 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4562 {
4563 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
4564 consider just the QI as the memory to extract from.
4565 The subreg adds or removes high bits; its mode is
4566 irrelevant to the meaning of this extraction,
4567 since POS and LEN count from the lsb. */
4568 if (GET_CODE (SUBREG_REG (inner)) == MEM)
4569 is_mode = GET_MODE (SUBREG_REG (inner));
4570 inner = SUBREG_REG (inner);
4571 }
4572
4573 inner_mode = GET_MODE (inner);
4574
4575 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4576 pos = INTVAL (pos_rtx);
4577
4578 /* See if this can be done without an extraction. We never can if the
4579 width of the field is not the same as that of some integer mode. For
4580 registers, we can only avoid the extraction if the position is at the
4581 low-order bit and this is either not in the destination or we have the
4582 appropriate STRICT_LOW_PART operation available.
4583
4584 For MEM, we can avoid an extract if the field starts on an appropriate
4585 boundary and we can change the mode of the memory reference. However,
4586 we cannot directly access the MEM if we have a USE and the underlying
4587 MEM is not TMODE. This combination means that MEM was being used in a
4588 context where bits outside its mode were being referenced; that is only
4589 valid in bit-field insns. */
4590
4591 if (tmode != BLKmode
4592 && ! (spans_byte && inner_mode != tmode)
4593 && ((pos == 0 && GET_CODE (inner) != MEM
4594 && (! in_dest
4595 || (GET_CODE (inner) == REG
4596 && (movstrict_optab->handlers[(int) tmode].insn_code
4597 != CODE_FOR_nothing))))
4598 || (GET_CODE (inner) == MEM && pos >= 0
4599 && (pos
4600 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4601 : BITS_PER_UNIT)) == 0
4602 /* We can't do this if we are widening INNER_MODE (it
4603 may not be aligned, for one thing). */
4604 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4605 && (inner_mode == tmode
4606 || (! mode_dependent_address_p (XEXP (inner, 0))
4607 && ! MEM_VOLATILE_P (inner))))))
4608 {
4609 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4610 field. If the original and current mode are the same, we need not
4611 adjust the offset. Otherwise, we do so if bytes are big-endian.
4612
4613 If INNER is not a MEM, get a piece consisting of just the field
4614 of interest (in this case POS must be 0). */
4615
4616 if (GET_CODE (inner) == MEM)
4617 {
4618 int offset;
4619 /* POS counts from lsb, but make OFFSET count in memory order. */
4620 if (BYTES_BIG_ENDIAN)
4621 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
4622 else
4623 offset = pos / BITS_PER_UNIT;
4624
4625 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4626 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4627 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4628 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4629 }
4630 else if (GET_CODE (inner) == REG)
4631 /* We can't call gen_lowpart_for_combine here since we always want
4632 a SUBREG and it would sometimes return a new hard register. */
4633 new = gen_rtx (SUBREG, tmode, inner,
4634 (WORDS_BIG_ENDIAN
4635 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
4636 ? ((GET_MODE_SIZE (inner_mode) - GET_MODE_SIZE (tmode))
4637 / UNITS_PER_WORD)
4638 : 0));
4639 else
4640 new = force_to_mode (inner, tmode, len, NULL_RTX);
4641
4642 /* If this extraction is going into the destination of a SET,
4643 make a STRICT_LOW_PART unless we made a MEM. */
4644
4645 if (in_dest)
4646 return (GET_CODE (new) == MEM ? new
4647 : (GET_CODE (new) != SUBREG
4648 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4649 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
4650
4651 /* Otherwise, sign- or zero-extend unless we already are in the
4652 proper mode. */
4653
4654 return (mode == tmode ? new
4655 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4656 mode, new));
4657 }
4658
4659 /* Unless this is a COMPARE or we have a funny memory reference,
4660 don't do anything with zero-extending field extracts starting at
4661 the low-order bit since they are simple AND operations. */
4662 if (pos == 0 && ! in_dest && ! in_compare && ! spans_byte && unsignedp)
4663 return 0;
4664
4665 /* Get the mode to use should INNER be a MEM, the mode for the position,
4666 and the mode for the result. */
4667 #ifdef HAVE_insv
4668 if (in_dest)
4669 {
4670 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4671 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4672 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4673 }
4674 #endif
4675
4676 #ifdef HAVE_extzv
4677 if (! in_dest && unsignedp)
4678 {
4679 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4680 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4681 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4682 }
4683 #endif
4684
4685 #ifdef HAVE_extv
4686 if (! in_dest && ! unsignedp)
4687 {
4688 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4689 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4690 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4691 }
4692 #endif
4693
4694 /* Never narrow an object, since that might not be safe. */
4695
4696 if (mode != VOIDmode
4697 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4698 extraction_mode = mode;
4699
4700 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4701 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4702 pos_mode = GET_MODE (pos_rtx);
4703
4704 /* If this is not from memory or we have to change the mode of memory and
4705 cannot, the desired mode is EXTRACTION_MODE. */
4706 if (GET_CODE (inner) != MEM
4707 || (inner_mode != wanted_mem_mode
4708 && (mode_dependent_address_p (XEXP (inner, 0))
4709 || MEM_VOLATILE_P (inner))))
4710 wanted_mem_mode = extraction_mode;
4711
4712 #if BITS_BIG_ENDIAN
4713 /* If position is constant, compute new position. Otherwise, build
4714 subtraction. */
4715 if (pos >= 0)
4716 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4717 - len - pos);
4718 else
4719 pos_rtx
4720 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
4721 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4722 GET_MODE_BITSIZE (wanted_mem_mode))
4723 - len),
4724 pos_rtx);
4725 #endif
4726
4727 /* If INNER has a wider mode, make it smaller. If this is a constant
4728 extract, try to adjust the byte to point to the byte containing
4729 the value. */
4730 if (wanted_mem_mode != VOIDmode
4731 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4732 && ((GET_CODE (inner) == MEM
4733 && (inner_mode == wanted_mem_mode
4734 || (! mode_dependent_address_p (XEXP (inner, 0))
4735 && ! MEM_VOLATILE_P (inner))))))
4736 {
4737 int offset = 0;
4738
4739 /* The computations below will be correct if the machine is big
4740 endian in both bits and bytes or little endian in bits and bytes.
4741 If it is mixed, we must adjust. */
4742
4743 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4744 if (! spans_byte && is_mode != wanted_mem_mode)
4745 offset = (GET_MODE_SIZE (is_mode)
4746 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4747 #endif
4748
4749 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4750 adjust OFFSET to compensate. */
4751 #if BYTES_BIG_ENDIAN
4752 if (! spans_byte
4753 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4754 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4755 #endif
4756
4757 /* If this is a constant position, we can move to the desired byte. */
4758 if (pos >= 0)
4759 {
4760 offset += pos / BITS_PER_UNIT;
4761 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4762 }
4763
4764 if (offset != 0 || inner_mode != wanted_mem_mode)
4765 {
4766 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4767 plus_constant (XEXP (inner, 0), offset));
4768 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4769 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4770 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4771 inner = newmem;
4772 }
4773 }
4774
4775 /* If INNER is not memory, we can always get it into the proper mode. */
4776 else if (GET_CODE (inner) != MEM)
4777 inner = force_to_mode (inner, extraction_mode,
4778 (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
4779 : len + pos),
4780 NULL_RTX);
4781
4782 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4783 have to zero extend. Otherwise, we can just use a SUBREG. */
4784 if (pos < 0
4785 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4786 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
4787 else if (pos < 0
4788 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4789 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4790
4791 /* Make POS_RTX unless we already have it and it is correct. */
4792 if (pos_rtx == 0 || (pos >= 0 && INTVAL (pos_rtx) != pos))
4793 pos_rtx = GEN_INT (pos);
4794
4795 /* Make the required operation. See if we can use existing rtx. */
4796 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
4797 extraction_mode, inner, GEN_INT (len), pos_rtx);
4798 if (! in_dest)
4799 new = gen_lowpart_for_combine (mode, new);
4800
4801 return new;
4802 }
4803 \f
4804 /* Look at the expression rooted at X. Look for expressions
4805 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4806 Form these expressions.
4807
4808 Return the new rtx, usually just X.
4809
4810 Also, for machines like the Vax that don't have logical shift insns,
4811 try to convert logical to arithmetic shift operations in cases where
4812 they are equivalent. This undoes the canonicalizations to logical
4813 shifts done elsewhere.
4814
4815 We try, as much as possible, to re-use rtl expressions to save memory.
4816
4817 IN_CODE says what kind of expression we are processing. Normally, it is
4818 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
4819 being kludges), it is MEM. When processing the arguments of a comparison
4820 or a COMPARE against zero, it is COMPARE. */
4821
4822 static rtx
4823 make_compound_operation (x, in_code)
4824 rtx x;
4825 enum rtx_code in_code;
4826 {
4827 enum rtx_code code = GET_CODE (x);
4828 enum machine_mode mode = GET_MODE (x);
4829 int mode_width = GET_MODE_BITSIZE (mode);
4830 enum rtx_code next_code;
4831 int i, count;
4832 rtx new = 0;
4833 char *fmt;
4834
4835 /* Select the code to be used in recursive calls. Once we are inside an
4836 address, we stay there. If we have a comparison, set to COMPARE,
4837 but once inside, go back to our default of SET. */
4838
4839 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
4840 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
4841 && XEXP (x, 1) == const0_rtx) ? COMPARE
4842 : in_code == COMPARE ? SET : in_code);
4843
4844 /* Process depending on the code of this operation. If NEW is set
4845 non-zero, it will be returned. */
4846
4847 switch (code)
4848 {
4849 case ASHIFT:
4850 case LSHIFT:
4851 /* Convert shifts by constants into multiplications if inside
4852 an address. */
4853 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
4854 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4855 && INTVAL (XEXP (x, 1)) >= 0)
4856 new = gen_rtx_combine (MULT, mode, XEXP (x, 0),
4857 GEN_INT ((HOST_WIDE_INT) 1
4858 << INTVAL (XEXP (x, 1))));
4859 break;
4860
4861 case AND:
4862 /* If the second operand is not a constant, we can't do anything
4863 with it. */
4864 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4865 break;
4866
4867 /* If the constant is a power of two minus one and the first operand
4868 is a logical right shift, make an extraction. */
4869 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4870 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4871 new = make_extraction (mode, XEXP (XEXP (x, 0), 0), -1,
4872 XEXP (XEXP (x, 0), 1), i, 1,
4873 0, in_code == COMPARE);
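/* For example (hypothetical RTL): the test above typically turns
   (and:SI (lshiftrt:SI X (const_int 4)) (const_int 255))
   into (zero_extract:SI X (const_int 8) (const_int 4)),
   since 255 == (1 << 8) - 1 gives I == 8.  */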
4874
4875 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4876 else if (GET_CODE (XEXP (x, 0)) == SUBREG
4877 && subreg_lowpart_p (XEXP (x, 0))
4878 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
4879 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4880 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))),
4881 XEXP (SUBREG_REG (XEXP (x, 0)), 0), -1,
4882 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
4883 0, in_code == COMPARE);
4884
4885
4886 /* If we have (and (rotate X C) M) and C is at least the number
4887 of bits in M, this is an extraction. */
4888
4889 else if (GET_CODE (XEXP (x, 0)) == ROTATE
4890 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4891 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
4892 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
4893 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4894 (GET_MODE_BITSIZE (mode)
4895 - INTVAL (XEXP (XEXP (x, 0), 1))),
4896 NULL_RTX, i, 1, 0, in_code == COMPARE);
4897
4898 /* On machines without logical shifts, if the operand of the AND is
4899 a logical shift and our mask turns off all the propagated sign
4900 bits, we can replace the logical shift with an arithmetic shift. */
4901 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4902 && (lshr_optab->handlers[(int) mode].insn_code
4903 == CODE_FOR_nothing)
4904 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4905 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4906 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
4907 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
4908 && mode_width <= HOST_BITS_PER_WIDE_INT)
4909 {
4910 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
4911
4912 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
4913 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
4914 SUBST (XEXP (x, 0),
4915 gen_rtx_combine (ASHIFTRT, mode, XEXP (XEXP (x, 0), 0),
4916 XEXP (XEXP (x, 0), 1)));
4917 }
4918
4919 /* If the constant is one less than a power of two, this might be
4920 representable by an extraction even if no shift is present.
4921 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
4922 we are in a COMPARE. */
4923 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4924 new = make_extraction (mode, XEXP (x, 0), 0, NULL_RTX, i, 1,
4925 0, in_code == COMPARE);
4926
4927 /* If we are in a comparison and this is an AND with a power of two,
4928 convert this into the appropriate bit extract. */
4929 else if (in_code == COMPARE
4930 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4931 new = make_extraction (mode, XEXP (x, 0), i, NULL_RTX, 1, 1, 0, 1);
4932
4933 break;
4934
4935 case LSHIFTRT:
4936 /* If the sign bit is known to be zero, replace this with an
4937 arithmetic shift. */
4938 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
4939 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4940 && mode_width <= HOST_BITS_PER_WIDE_INT
4941 && (significant_bits (XEXP (x, 0), mode)
4942 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
4943 {
4944 new = gen_rtx_combine (ASHIFTRT, mode, XEXP (x, 0), XEXP (x, 1));
4945 break;
4946 }
4947
4948 /* ... fall through ... */
4949
4950 case ASHIFTRT:
4951 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
4952 this is a SIGN_EXTRACT. */
4953 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4954 && GET_CODE (XEXP (x, 0)) == ASHIFT
4955 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4956 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
4957 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4958 (INTVAL (XEXP (x, 1))
4959 - INTVAL (XEXP (XEXP (x, 0), 1))),
4960 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4961 code == LSHIFTRT, 0, in_code == COMPARE);
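/* For example (hypothetical RTL, SImode): (ashiftrt:SI (ashift:SI X
   (const_int 24)) (const_int 24)) extracts LEN == 32 - 24 == 8 bits
   at POS == 24 - 24 == 0, i.e. it sign-extends the low byte of X.  */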
4962
4963 /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2). In these
4964 cases, we are better off returning a SIGN_EXTEND of the operation. */
4965
4966 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4967 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
4968 || GET_CODE (XEXP (x, 0)) == XOR
4969 || GET_CODE (XEXP (x, 0)) == PLUS)
4970 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
4971 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4972 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4973 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
4974 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4975 && (INTVAL (XEXP (XEXP (x, 0), 1))
4976 & (((HOST_WIDE_INT) 1
4977 << INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))) - 1)) == 0)
4978 {
4979 HOST_WIDE_INT newop1
4980 = (INTVAL (XEXP (XEXP (x, 0), 1))
4981 >> INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)));
4982
4983 new = make_extraction (mode,
4984 gen_binary (GET_CODE (XEXP (x, 0)), mode,
4985 XEXP (XEXP (XEXP (x, 0), 0), 0),
4986 GEN_INT (newop1)),
4987 (INTVAL (XEXP (x, 1))
4988 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
4989 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4990 code == LSHIFTRT, 0, in_code == COMPARE);
4991 }
4992
4993 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
4994 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4995 && GET_CODE (XEXP (x, 0)) == NEG
4996 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
4997 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4998 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
4999 new = make_extraction (mode,
5000 gen_unary (GET_CODE (XEXP (x, 0)), mode,
5001 XEXP (XEXP (XEXP (x, 0), 0), 0)),
5002 (INTVAL (XEXP (x, 1))
5003 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5004 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5005 code == LSHIFTRT, 0, in_code == COMPARE);
5006 break;
5007 }
5008
5009 if (new)
5010 {
5011 x = gen_lowpart_for_combine (mode, new);
5012 code = GET_CODE (x);
5013 }
5014
5015 /* Now recursively process each operand of this operation. */
5016 fmt = GET_RTX_FORMAT (code);
5017 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5018 if (fmt[i] == 'e')
5019 {
5020 new = make_compound_operation (XEXP (x, i), next_code);
5021 SUBST (XEXP (x, i), new);
5022 }
5023
5024 return x;
5025 }
5026 \f
5027 /* Given M, see if it is a value that would select a field of bits
5028 within an item, but not the entire word. Return -1 if not.
5029 Otherwise, return the starting position of the field, where 0 is the
5030 low-order bit.
5031
5032 *PLEN is set to the length of the field. */
5033
5034 static int
5035 get_pos_from_mask (m, plen)
5036 unsigned HOST_WIDE_INT m;
5037 int *plen;
5038 {
5039 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5040 int pos = exact_log2 (m & - m);
5041
5042 if (pos < 0)
5043 return -1;
5044
5045 /* Now shift off the low-order zero bits and see if we have a power of
5046 two minus 1. */
5047 *plen = exact_log2 ((m >> pos) + 1);
5048
5049 if (*plen <= 0)
5050 return -1;
5051
5052 return pos;
5053 }
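/* For example (illustrative value): with M == 0x78 (binary 0111 1000),
   M & -M == 0x08 so POS == 3, and (M >> 3) + 1 == 0x10 is a power of
   two, so *PLEN == 4: M selects a four-bit field starting at bit 3.  */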
5054 \f
5055 /* Rewrite X so that it is an expression in MODE. We only care about the
5056 low-order BITS bits so we can ignore AND operations that just clear
5057 higher-order bits.
5058
5059 Also, if REG is non-zero and X is a register equal in value to REG,
5060 replace X with REG. */
5061
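/* For example: forcing (and:SI X (const_int 255)) into QImode with
   BITS == 8 drops the AND entirely, since the mask only clears bits
   above bit 7, and just returns the low part of X in QImode.  */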
5062 static rtx
5063 force_to_mode (x, mode, bits, reg)
5064 rtx x;
5065 enum machine_mode mode;
5066 int bits;
5067 rtx reg;
5068 {
5069 enum rtx_code code = GET_CODE (x);
5070 enum machine_mode op_mode = mode;
5071
5072 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
5073 just get X in the proper mode. */
5074
5075 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5076 || bits > GET_MODE_BITSIZE (mode))
5077 return gen_lowpart_for_combine (mode, x);
5078
5079 switch (code)
5080 {
5081 case SIGN_EXTEND:
5082 case ZERO_EXTEND:
5083 case ZERO_EXTRACT:
5084 case SIGN_EXTRACT:
5085 x = expand_compound_operation (x);
5086 if (GET_CODE (x) != code)
5087 return force_to_mode (x, mode, bits, reg);
5088 break;
5089
5090 case REG:
5091 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5092 || rtx_equal_p (reg, get_last_value (x))))
5093 x = reg;
5094 break;
5095
5096 case CONST_INT:
5097 if (bits < HOST_BITS_PER_WIDE_INT)
5098 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
5099 return x;
5100
5101 case SUBREG:
5102 /* Ignore low-order SUBREGs. */
5103 if (subreg_lowpart_p (x))
5104 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
5105 break;
5106
5107 case AND:
5108 /* If this is an AND with a constant, handle it specially.
5109 Otherwise, we fall through to do the general binary case. */
5110
5111 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5112 {
5113 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
5114 int len = exact_log2 (mask + 1);
5115 rtx op = XEXP (x, 0);
5116
5117 /* If this is masking some low-order bits, we may be able to
5118 impose a stricter constraint on what bits of the operand are
5119 required. */
5120
5121 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
5122 reg);
5123
5124 if (bits < HOST_BITS_PER_WIDE_INT)
5125 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
5126
5127 /* If we have no AND in MODE, use the original mode for the
5128 operation. */
5129
5130 if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5131 op_mode = GET_MODE (x);
5132
5133 x = simplify_and_const_int (x, op_mode, op, mask);
5134
5135 /* If X is still an AND, see if it is an AND with a mask that
5136 is just some low-order bits. If so, and it is BITS wide (it
5137 can't be wider), we don't need it. */
5138
5139 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5140 && bits < HOST_BITS_PER_WIDE_INT
5141 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
5142 x = XEXP (x, 0);
5143
5144 break;
5145 }
5146
5147 /* ... fall through ... */
5148
5149 case PLUS:
5150 case MINUS:
5151 case MULT:
5152 case IOR:
5153 case XOR:
5154 /* For most binary operations, just propagate into the operation and
5155 change the mode if we have an operation of that mode. */
5156
5157 if ((code == PLUS
5158 && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5159 || (code == MINUS
5160 && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5161 || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
5162 == CODE_FOR_nothing))
5163 || (code == AND
5164 && and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5165 || (code == IOR
5166 && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5167 || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
5168 == CODE_FOR_nothing)))
5169 op_mode = GET_MODE (x);
5170
5171 x = gen_binary (code, op_mode,
5172 gen_lowpart_for_combine (op_mode,
5173 force_to_mode (XEXP (x, 0),
5174 mode, bits,
5175 reg)),
5176 gen_lowpart_for_combine (op_mode,
5177 force_to_mode (XEXP (x, 1),
5178 mode, bits,
5179 reg)));
5180 break;
5181
5182 case ASHIFT:
5183 case LSHIFT:
5184 /* For left shifts, do the same, but just for the first operand.
5185 If the shift count is a constant, we need even fewer bits of the
5186 first operand. */
5187
5188 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
5189 bits -= INTVAL (XEXP (x, 1));
5190
5191 if ((code == ASHIFT
5192 && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5193 || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
5194 == CODE_FOR_nothing)))
5195 op_mode = GET_MODE (x);
5196
5197 x = gen_binary (code, op_mode,
5198 gen_lowpart_for_combine (op_mode,
5199 force_to_mode (XEXP (x, 0),
5200 mode, bits,
5201 reg)),
5202 XEXP (x, 1));
5203 break;
5204
5205 case LSHIFTRT:
5206 /* We can only do something here if the shift count is a constant.
5207 Then, if the count plus BITS is no larger than the width of MODE,
5208 we can do the shift in MODE. */
5209
5210 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5211 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
5212 {
5213 rtx inner = force_to_mode (XEXP (x, 0), mode,
5214 bits + INTVAL (XEXP (x, 1)), reg);
5215
5216 if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5217 op_mode = GET_MODE (x);
5218
5219 x = gen_binary (LSHIFTRT, op_mode,
5220 gen_lowpart_for_combine (op_mode, inner),
5221 XEXP (x, 1));
5222 }
5223 break;
5224
5225 case ASHIFTRT:
5226 /* If this is a sign-extension operation that just affects bits
5227 we don't care about, remove it. */
5228
5229 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5230 && INTVAL (XEXP (x, 1)) >= 0
5231 && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
5232 && GET_CODE (XEXP (x, 0)) == ASHIFT
5233 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5234 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5235 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
5236 break;
5237
5238 case NEG:
5239 case NOT:
5240 if ((code == NEG
5241 && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5242 || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
5243 == CODE_FOR_nothing)))
5244 op_mode = GET_MODE (x);
5245
5246 /* Handle these similarly to the way we handle most binary operations. */
5247 x = gen_unary (code, op_mode,
5248 gen_lowpart_for_combine (op_mode,
5249 force_to_mode (XEXP (x, 0), mode,
5250 bits, reg)));
5251 break;
5252
5253 case IF_THEN_ELSE:
5254 /* We have no way of knowing if the IF_THEN_ELSE can itself be
5255 written in a narrower mode. We play it safe and do not do so. */
5256
5257 SUBST (XEXP (x, 1),
5258 gen_lowpart_for_combine (GET_MODE (x),
5259 force_to_mode (XEXP (x, 1), mode,
5260 bits, reg)));
5261 SUBST (XEXP (x, 2),
5262 gen_lowpart_for_combine (GET_MODE (x),
5263 force_to_mode (XEXP (x, 2), mode,
5264 bits, reg)));
5265 break;
5266 }
5267
5268 /* Ensure we return a value of the proper mode. */
5269 return gen_lowpart_for_combine (mode, x);
5270 }
5271 \f
5272 /* Return the value of expression X given the fact that condition COND
5273 is known to be true when applied to REG as its first operand and VAL
5274 as its second. X is known to not be shared and so can be modified in
5275 place.
5276
5277 We only handle the simplest cases, and specifically those cases that
5278 arise with IF_THEN_ELSE expressions. */
5279
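/* For example: if COND is GE, REG is (reg:SI 100) and VAL is
   (const_int 0), then (abs:SI (reg:SI 100)) simplifies to
   (reg:SI 100) and (ge:SI (reg:SI 100) (const_int 0)) to
   const_true_rtx.  (The register numbers here are illustrative.)  */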
5280 static rtx
5281 known_cond (x, cond, reg, val)
5282 rtx x;
5283 enum rtx_code cond;
5284 rtx reg, val;
5285 {
5286 enum rtx_code code = GET_CODE (x);
5287 rtx new, temp;
5288 char *fmt;
5289 int i, j;
5290
5291 if (side_effects_p (x))
5292 return x;
5293
5294 if (cond == EQ && rtx_equal_p (x, reg))
5295 return val;
5296
5297 /* If X is (abs REG) and we know something about REG's relationship
5298 with zero, we may be able to simplify this. */
5299
5300 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
5301 switch (cond)
5302 {
5303 case GE: case GT: case EQ:
5304 return XEXP (x, 0);
5305 case LT: case LE:
5306 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
5307 }
5308
5309 /* The only other cases we handle are MIN, MAX, and comparisons if the
5310 operands are the same as REG and VAL. */
5311
5312 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
5313 {
5314 if (rtx_equal_p (XEXP (x, 0), val))
5315 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
5316
5317 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
5318 {
5319 if (GET_RTX_CLASS (code) == '<')
5320 return (comparison_dominates_p (cond, code) ? const_true_rtx
5321 : (comparison_dominates_p (cond,
5322 reverse_condition (code))
5323 ? const0_rtx : x));
5324
5325 else if (code == SMAX || code == SMIN
5326 || code == UMIN || code == UMAX)
5327 {
5328 int unsignedp = (code == UMIN || code == UMAX);
5329
5330 if (code == SMAX || code == UMAX)
5331 cond = reverse_condition (cond);
5332
5333 switch (cond)
5334 {
5335 case GE: case GT:
5336 return unsignedp ? x : XEXP (x, 1);
5337 case LE: case LT:
5338 return unsignedp ? x : XEXP (x, 0);
5339 case GEU: case GTU:
5340 return unsignedp ? XEXP (x, 1) : x;
5341 case LEU: case LTU:
5342 return unsignedp ? XEXP (x, 0) : x;
5343 }
5344 }
5345 }
5346 }
5347
5348 fmt = GET_RTX_FORMAT (code);
5349 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5350 {
5351 if (fmt[i] == 'e')
5352 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
5353 else if (fmt[i] == 'E')
5354 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5355 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
5356 cond, reg, val));
5357 }
5358
5359 return x;
5360 }
5361 \f
5362 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
5363 Return that assignment if so.
5364
5365 We only handle the most common cases. */
5366
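/* For example (hypothetical RTL):
   (set (reg:SI 100)
        (and:SI (rotate:SI (const_int -2) (reg:SI 101)) (reg:SI 100)))
   clears a single bit of register 100 and is rewritten below as
   (set (zero_extract:SI (reg:SI 100) (const_int 1) (reg:SI 101))
        (const_int 0)).  */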
5367 static rtx
5368 make_field_assignment (x)
5369 rtx x;
5370 {
5371 rtx dest = SET_DEST (x);
5372 rtx src = SET_SRC (x);
5373 rtx ourdest;
5374 rtx assign;
5375 HOST_WIDE_INT c1;
5376 int pos, len;
5377 rtx other;
5378 enum machine_mode mode;
5379
5380 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
5381 a clear of a one-bit field. We will have changed it to
5382 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
5383 for a SUBREG. */
5384
5385 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
5386 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
5387 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
5388 && (rtx_equal_p (dest, XEXP (src, 1))
5389 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5390 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5391 {
5392 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
5393 1, 1, 1, 0);
5394 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5395 }
5396
5397 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
5398 && subreg_lowpart_p (XEXP (src, 0))
5399 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
5400 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
5401 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
5402 && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
&& INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
5403 && (rtx_equal_p (dest, XEXP (src, 1))
5404 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5405 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5406 {
5407 assign = make_extraction (VOIDmode, dest, -1,
5408 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
5409 1, 1, 1, 0);
5410 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5411 }
5412
5413 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
5414 one-bit field. */
5415 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
5416 && XEXP (XEXP (src, 0), 0) == const1_rtx
5417 && (rtx_equal_p (dest, XEXP (src, 1))
5418 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5419 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5420 {
5421 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
5422 1, 1, 1, 0);
5423 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
5424 }
5425
5426 /* The other case we handle is assignments into a constant-position
5427 field. They look like (ior (and DEST C1) OTHER). If C1 represents
5428 a mask that has all one bits except for a group of zero bits and
5429 OTHER is known to have zeros where C1 has ones, this is such an
5430 assignment. Compute the position and length from C1. Shift OTHER
5431 to the appropriate position, force it to the required mode, and
5432 make the extraction. Check for the AND in both operands. */
5433
5434 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
5435 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
5436 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
5437 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
5438 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 0)), dest)))
5439 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
5440 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
5441 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
5442 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
5443 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
5444 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
5445 dest)))
5446 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
5447 else
5448 return x;
5449
5450 pos = get_pos_from_mask (~c1, &len);
5451 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
5452 || (c1 & significant_bits (other, GET_MODE (other))) != 0)
5453 return x;
5454
5455 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
5456
5457 /* The mode to use for the source is the mode of the assignment, or of
5458 what is inside a possible STRICT_LOW_PART. */
5459 mode = (GET_CODE (assign) == STRICT_LOW_PART
5460 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
5461
5462 /* Shift OTHER right POS places and make it the source, restricting it
5463 to the proper length and mode. */
5464
5465 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
5466 GET_MODE (src), other, pos),
5467 mode, len, dest);
5468
5469 return gen_rtx_combine (SET, VOIDmode, assign, src);
5470 }
5471 \f
5472 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5473 if so. */
5474
5475 static rtx
5476 apply_distributive_law (x)
5477 rtx x;
5478 {
5479 enum rtx_code code = GET_CODE (x);
5480 rtx lhs, rhs, other;
5481 rtx tem;
5482 enum rtx_code inner_code;
5483
5484 /* The outer operation can only be one of the following: */
5485 if (code != IOR && code != AND && code != XOR
5486 && code != PLUS && code != MINUS)
5487 return x;
5488
5489 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
5490
5491 /* If either operand is a primitive we can't do anything, so get out fast. */
5492 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
5493 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
5494 return x;
5495
5496 lhs = expand_compound_operation (lhs);
5497 rhs = expand_compound_operation (rhs);
5498 inner_code = GET_CODE (lhs);
5499 if (inner_code != GET_CODE (rhs))
5500 return x;
5501
5502 /* See if the inner and outer operations distribute. */
5503 switch (inner_code)
5504 {
5505 case LSHIFTRT:
5506 case ASHIFTRT:
5507 case AND:
5508 case IOR:
5509 /* These all distribute except over PLUS. */
5510 if (code == PLUS || code == MINUS)
5511 return x;
5512 break;
5513
5514 case MULT:
5515 if (code != PLUS && code != MINUS)
5516 return x;
5517 break;
5518
5519 case ASHIFT:
5520 case LSHIFT:
5521 /* These are also multiplies, so they distribute over everything. */
5522 break;
5523
5524 case SUBREG:
5525 /* Non-paradoxical SUBREGs distribute over all operations, provided
5526 the inner modes and word numbers are the same, this is an extraction
5527 of a low-order part, we don't convert an fp operation to int or
5528 vice versa, and we would not be converting a single-word
5529 operation into a multi-word operation. The latter test is not
5530 required, but it prevents generating unneeded multi-word operations.
5531 Some of the previous tests are redundant given the latter test, but
5532 are retained because they are required for correctness.
5533
5534 We produce the result slightly differently in this case. */
5535
5536 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
5537 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
5538 || ! subreg_lowpart_p (lhs)
5539 || (GET_MODE_CLASS (GET_MODE (lhs))
5540 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
5541 || (GET_MODE_SIZE (GET_MODE (lhs))
5542 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
5543 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
5544 return x;
5545
5546 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
5547 SUBREG_REG (lhs), SUBREG_REG (rhs));
5548 return gen_lowpart_for_combine (GET_MODE (x), tem);
5549
5550 default:
5551 return x;
5552 }
5553
5554 /* Set LHS and RHS to the inner operands (A and B in the example
5555 above) and set OTHER to the common operand (C in the example).
5556 There is only one way to do this unless the inner operation is
5557 commutative. */
5558 if (GET_RTX_CLASS (inner_code) == 'c'
5559 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
5560 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
5561 else if (GET_RTX_CLASS (inner_code) == 'c'
5562 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
5563 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
5564 else if (GET_RTX_CLASS (inner_code) == 'c'
5565 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
5566 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
5567 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
5568 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
5569 else
5570 return x;
5571
5572 /* Form the new inner operation, seeing if it simplifies first. */
5573 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
5574
5575 /* There is one exception to the general way of distributing:
5576 (a | c) ^ (b | c) -> (a ^ b) & ~c */
5577 if (code == XOR && inner_code == IOR)
5578 {
5579 inner_code = AND;
5580 other = gen_unary (NOT, GET_MODE (x), other);
5581 }
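/* To see that the rewrite above is safe: when C is all ones,
   (A | C) ^ (B | C) and (A ^ B) & ~C are both zero; when C is all
   zeros, both reduce to A ^ B.  The same holds bit by bit.  */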
5582
5583 /* We may be able to continue distributing the result, so call
5584 ourselves recursively on the inner operation before forming the
5585 outer operation, which we return. */
5586 return gen_binary (inner_code, GET_MODE (x),
5587 apply_distributive_law (tem), other);
5588 }
5589 \f
5590 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5591 in MODE.
5592
5593 Return an equivalent form, if different from X. Otherwise, return X. If
5594 X is zero, we are to always construct the equivalent form. */
5595
5596 static rtx
5597 simplify_and_const_int (x, mode, varop, constop)
5598 rtx x;
5599 enum machine_mode mode;
5600 rtx varop;
5601 unsigned HOST_WIDE_INT constop;
5602 {
5603 register enum machine_mode tmode;
5604 register rtx temp;
5605 unsigned HOST_WIDE_INT significant;
5606
5607 /* There is a large class of optimizations based on the principle that
5608 some operations produce results where certain bits are known to be zero,
5609 and hence are not significant to the AND. For example, if we have just
5610 done a left shift of one bit, the low-order bit is known to be zero and
5611 hence an AND with a mask of ~1 would not do anything.
5612
5613 At the end of the following loop, we set:
5614
5615 VAROP to be the item to be AND'ed with;
5616 CONSTOP to the constant value to AND it with. */
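/* For example (illustrative): for (and:SI (ashift:SI X (const_int 3))
   (const_int 255)), the low three bits of the shift are known to be
   zero, so CONSTOP is reduced below to 248 (0xF8).  */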
5617
5618 while (1)
5619 {
5620 /* If we ever encounter a mode wider than the host machine's widest
5621 integer size, we can't compute the masks accurately, so give up. */
5622 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
5623 break;
5624
5625 /* Unless one of the cases below does a `continue',
5626 a `break' will be executed to exit the loop. */
5627
5628 switch (GET_CODE (varop))
5629 {
5630 case CLOBBER:
5631 /* If VAROP is a (clobber (const_int)), return it since we know
5632 we are generating something that won't match. */
5633 return varop;
5634
5635 #if ! BITS_BIG_ENDIAN
5636 case USE:
5637 /* VAROP is a (use (mem ..)) that was made from a bit-field
5638 extraction that spanned the boundary of the MEM. If we are
5639 now masking so it is within that boundary, we don't need the
5640 USE any more. */
5641 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
5642 {
5643 varop = XEXP (varop, 0);
5644 continue;
5645 }
5646 break;
5647 #endif
5648
5649 case SUBREG:
5650 if (subreg_lowpart_p (varop)
5651 /* We can ignore the effect of this SUBREG if it narrows the mode
5652 or, on machines where byte operations extend, if the
5653 constant masks to zero all the bits the mode doesn't have. */
5654 && ((GET_MODE_SIZE (GET_MODE (varop))
5655 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
5656 #if defined(BYTE_LOADS_ZERO_EXTEND) || defined(BYTE_LOADS_SIGN_EXTEND)
5657 || (0 == (constop
5658 & GET_MODE_MASK (GET_MODE (varop))
5659 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
5660 #endif
5661 ))
5662 {
5663 varop = SUBREG_REG (varop);
5664 continue;
5665 }
5666 break;
5667
5668 case ZERO_EXTRACT:
5669 case SIGN_EXTRACT:
5670 case ZERO_EXTEND:
5671 case SIGN_EXTEND:
5672 /* Try to expand these into a series of shifts and then work
5673 with that result. If we can't, for example, if the extract
5674 isn't at a fixed position, give up. */
5675 temp = expand_compound_operation (varop);
5676 if (temp != varop)
5677 {
5678 varop = temp;
5679 continue;
5680 }
5681 break;
5682
5683 case AND:
5684 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
5685 {
5686 constop &= INTVAL (XEXP (varop, 1));
5687 varop = XEXP (varop, 0);
5688 continue;
5689 }
5690 break;
5691
5692 case IOR:
5693 case XOR:
5694 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5695 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5696 operation which may be a bitfield extraction. */
5697
5698 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5699 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5700 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5701 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
5702 && GET_CODE (XEXP (varop, 1)) == CONST_INT
5703 && ((INTVAL (XEXP (varop, 1))
5704 & ~ significant_bits (XEXP (varop, 0),
5705 GET_MODE (varop))) == 0))
5706 {
5707 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
5708 << INTVAL (XEXP (XEXP (varop, 0), 1)));
5709 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
5710 XEXP (XEXP (varop, 0), 0), temp);
5711 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5712 temp, XEXP (XEXP (varop, 0), 1));
5713 continue;
5714 }
5715
5716 /* Apply the AND to both branches of the IOR or XOR, then try to
5717 apply the distributive law. This may eliminate operations
5718 if either branch can be simplified because of the AND.
5719 It may also make some cases more complex, but those cases
5720 probably won't match a pattern either with or without this. */
5721 return
5722 gen_lowpart_for_combine
5723 (mode, apply_distributive_law
5724 (gen_rtx_combine
5725 (GET_CODE (varop), GET_MODE (varop),
5726 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5727 XEXP (varop, 0), constop),
5728 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5729 XEXP (varop, 1), constop))));
5730
5731 case NOT:
5732 /* (and (not FOO) CONST) is (and (xor FOO CONST) CONST), so if FOO
5733 is an LSHIFTRT we can do the same as above. */
5734
5735 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5736 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5737 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5738 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5739 {
5740 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
5741 temp = gen_binary (XOR, GET_MODE (varop),
5742 XEXP (XEXP (varop, 0), 0), temp);
5743 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5744 temp, XEXP (XEXP (varop, 0), 1));
5745 continue;
5746 }
5747 break;
5748
5749 case ASHIFTRT:
5750 /* If we are just looking for the sign bit, we don't need this
5751 shift at all, even if it has a variable count. */
5752 if (constop == ((HOST_WIDE_INT) 1
5753 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
5754 {
5755 varop = XEXP (varop, 0);
5756 continue;
5757 }
5758
5759 /* If this is a shift by a constant, get a mask that contains
5760 those bits that are not copies of the sign bit. We then have
5761 two cases: If CONSTOP only includes those bits, this can be
5762 a logical shift, which may allow simplifications. If CONSTOP
5763 is a single-bit field not within those bits, we are requesting
5764 a copy of the sign bit and hence can shift the sign bit to
5765 the appropriate location. */
5766 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5767 && INTVAL (XEXP (varop, 1)) >= 0
5768 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
5769 {
5770 int i = -1;
5771
5772 significant = GET_MODE_MASK (GET_MODE (varop));
5773 significant >>= INTVAL (XEXP (varop, 1));
5774
5775 if ((constop & ~significant) == 0
5776 || (i = exact_log2 (constop)) >= 0)
5777 {
5778 varop = simplify_shift_const
5779 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5780 i < 0 ? INTVAL (XEXP (varop, 1))
5781 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
5782 if (GET_CODE (varop) != ASHIFTRT)
5783 continue;
5784 }
5785 }
5786
5787 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
5788 even if the shift count isn't a constant. */
5789 if (constop == 1)
5790 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5791 XEXP (varop, 0), XEXP (varop, 1));
5792 break;
5793
5794 case NE:
5795 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5796 included in STORE_FLAG_VALUE and FOO has no significant bits
5797 not in CONST. */
5798 if ((constop & ~ STORE_FLAG_VALUE) == 0
5799 && XEXP (varop, 1) == const0_rtx
5800 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5801 {
5802 varop = XEXP (varop, 0);
5803 continue;
5804 }
5805 break;
5806
5807 case PLUS:
5808 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5809 low-order bits (as in an alignment operation) and FOO is already
5810 aligned to that boundary, we can remove this AND
5811 and possibly the PLUS if it is now adding zero. */
5812 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5813 && exact_log2 (-constop) >= 0
5814 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5815 {
5816 varop = plus_constant (XEXP (varop, 0),
5817 INTVAL (XEXP (varop, 1)) & constop);
5818 constop = ~0;
5819 break;
5820 }
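/* For example (illustrative): if X is known to have its low three
   bits zero, (and:SI (plus:SI X (const_int 7)) (const_int -8))
   becomes just X: CONSTOP == -8 passes the exact_log2 test, and
   the PLUS is then adding zero within the retained bits.  */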
5821
5822 /* ... fall through ... */
5823
5824 case MINUS:
5825 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
5826 less than powers of two and M2 is narrower than M1, we can
5827 eliminate the inner AND. This occurs when incrementing
5828 bit fields. */
5829
5830 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
5831 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
5832 SUBST (XEXP (varop, 0),
5833 expand_compound_operation (XEXP (varop, 0)));
5834
5835 if (GET_CODE (XEXP (varop, 0)) == AND
5836 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5837 && exact_log2 (constop + 1) >= 0
5838 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
5839 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
5840 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
5841 break;
5842 }
5843
5844 break;
5845 }
5846
5847 /* If we have reached a constant, this whole thing is constant. */
5848 if (GET_CODE (varop) == CONST_INT)
5849 return GEN_INT (constop & INTVAL (varop));
5850
5851 /* See what bits are significant in VAROP. */
5852 significant = significant_bits (varop, mode);
5853
5854 /* Turn off all bits in the constant that are known to already be zero.
5855 Thus, if the AND isn't needed at all, we will have CONSTOP == SIGNIFICANT
5856 which is tested below. */
5857
5858 constop &= significant;
5859
5860 /* If we don't have any bits left, return zero. */
5861 if (constop == 0)
5862 return const0_rtx;
5863
5864 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
5865 if we already had one (just check for the simplest cases). */
5866 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
5867 && GET_MODE (XEXP (x, 0)) == mode
5868 && SUBREG_REG (XEXP (x, 0)) == varop)
5869 varop = XEXP (x, 0);
5870 else
5871 varop = gen_lowpart_for_combine (mode, varop);
5872
5873 /* If we can't make the SUBREG, try to return what we were given. */
5874 if (GET_CODE (varop) == CLOBBER)
5875 return x ? x : varop;
5876
5877 /* If we are only masking insignificant bits, return VAROP. */
5878 if (constop == significant)
5879 x = varop;
5880
5881 /* Otherwise, return an AND. See how much, if any, of X we can use. */
5882 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
5883 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
5884
5885 else
5886 {
5887 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5888 || INTVAL (XEXP (x, 1)) != constop)
5889 SUBST (XEXP (x, 1), GEN_INT (constop));
5890
5891 SUBST (XEXP (x, 0), varop);
5892 }
5893
5894 return x;
5895 }
5896 \f
5897 /* Given an expression, X, compute which bits in X can be non-zero.
5898 We don't care about bits outside of those defined in MODE.
5899
5900 For most X this is simply GET_MODE_MASK (MODE), but if X is
5901 a shift, AND, or zero_extract, we can do better. */
5902
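/* For example: (and:SI X (const_int 15)) can have only bits 0-3
   non-zero, and (ashift:SI X (const_int 3)) always has bits 0-2
   zero, so the mask returned for it excludes them.  */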
5903 static unsigned HOST_WIDE_INT
5904 significant_bits (x, mode)
5905 rtx x;
5906 enum machine_mode mode;
5907 {
5908 unsigned HOST_WIDE_INT significant = GET_MODE_MASK (mode);
5909 unsigned HOST_WIDE_INT inner_sig;
5910 enum rtx_code code;
5911 int mode_width = GET_MODE_BITSIZE (mode);
5912 rtx tem;
5913
5914 /* If X is wider than MODE, use its mode instead. */
5915 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
5916 {
5917 mode = GET_MODE (x);
5918 significant = GET_MODE_MASK (mode);
5919 mode_width = GET_MODE_BITSIZE (mode);
5920 }
5921
5922 if (mode_width > HOST_BITS_PER_WIDE_INT)
5923 /* Our only callers in this case look for single bit values. So
5924 just return the mode mask. Those tests will then be false. */
5925 return significant;
5926
5927 code = GET_CODE (x);
5928 switch (code)
5929 {
5930 case REG:
5931 #ifdef STACK_BOUNDARY
5932 /* If this is the stack pointer, we may know something about its
5933 alignment. If PUSH_ROUNDING is defined, it is possible for the
5934 stack to be momentarily aligned only to that amount, so we pick
5935 the least alignment. */
5936
5937 if (x == stack_pointer_rtx)
5938 {
5939 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
5940
5941 #ifdef PUSH_ROUNDING
5942 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
5943 #endif
5944
5945 return significant & ~ (sp_alignment - 1);
5946 }
5947 #endif
5948
5949 /* If X is a register whose value we can find, use that value.
5950 Otherwise, use the previously-computed significant bits for this
5951 register. */
5952
5953 tem = get_last_value (x);
5954 if (tem)
5955 return significant_bits (tem, mode);
5956 else if (significant_valid && reg_significant[REGNO (x)])
5957 return reg_significant[REGNO (x)] & significant;
5958 else
5959 return significant;
5960
5961 case CONST_INT:
5962 return INTVAL (x);
5963
5964 #ifdef BYTE_LOADS_ZERO_EXTEND
5965 case MEM:
5966 /* In many, if not most, RISC machines, reading a byte from memory
5967 zeros the rest of the register. Noticing that fact saves a lot
5968 of extra zero-extends. */
5969 significant &= GET_MODE_MASK (GET_MODE (x));
5970 break;
5971 #endif
5972
5973 #if STORE_FLAG_VALUE == 1
5974 case EQ: case NE:
5975 case GT: case GTU:
5976 case LT: case LTU:
5977 case GE: case GEU:
5978 case LE: case LEU:
5979
5980 if (GET_MODE_CLASS (mode) == MODE_INT)
5981 significant = 1;
5982
5983 /* A comparison operation only sets the bits given by its mode. The
5984 rest are left undefined. */
5985 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5986 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5987 break;
5988 #endif
5989
5990 case NEG:
5991 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5992 == GET_MODE_BITSIZE (GET_MODE (x)))
5993 significant = 1;
5994
5995 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5996 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5997 break;
5998
5999 case ABS:
6000 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6001 == GET_MODE_BITSIZE (GET_MODE (x)))
6002 significant = 1;
6003 break;
6004
6005 case TRUNCATE:
6006 significant &= (significant_bits (XEXP (x, 0), mode)
6007 & GET_MODE_MASK (mode));
6008 break;
6009
6010 case ZERO_EXTEND:
6011 significant &= significant_bits (XEXP (x, 0), mode);
6012 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6013 significant &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6014 break;
6015
6016 case SIGN_EXTEND:
6017 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6018 Otherwise, record that all the bits in the outer mode but not in
6019 the inner mode may be non-zero. */
6020 inner_sig = significant_bits (XEXP (x, 0), mode);
6021 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6022 {
6023 inner_sig &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6024 if (inner_sig &
6025 (((HOST_WIDE_INT) 1
6026 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6027 inner_sig |= (GET_MODE_MASK (mode)
6028 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6029 }
6030
6031 significant &= inner_sig;
6032 break;
6033
6034 case AND:
6035 significant &= (significant_bits (XEXP (x, 0), mode)
6036 & significant_bits (XEXP (x, 1), mode));
6037 break;
6038
6039 case XOR: case IOR:
6040 case UMIN: case UMAX: case SMIN: case SMAX:
6041 significant &= (significant_bits (XEXP (x, 0), mode)
6042 | significant_bits (XEXP (x, 1), mode));
6043 break;
6044
6045 case PLUS: case MINUS:
6046 case MULT:
6047 case DIV: case UDIV:
6048 case MOD: case UMOD:
6049 /* We can apply the rules of arithmetic to compute the number of
6050 high- and low-order zero bits of these operations. We start by
6051 computing the width (position of the highest-order non-zero bit)
6052 and the number of low-order zero bits for each value. */
6053 {
6054 unsigned HOST_WIDE_INT sig0 = significant_bits (XEXP (x, 0), mode);
6055 unsigned HOST_WIDE_INT sig1 = significant_bits (XEXP (x, 1), mode);
6056 int width0 = floor_log2 (sig0) + 1;
6057 int width1 = floor_log2 (sig1) + 1;
6058 int low0 = floor_log2 (sig0 & -sig0);
6059 int low1 = floor_log2 (sig1 & -sig1);
6060 int op0_maybe_minusp = (sig0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6061 int op1_maybe_minusp = (sig1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6062 int result_width = mode_width;
6063 int result_low = 0;
6064
6065 switch (code)
6066 {
6067 case PLUS:
6068 result_width = MAX (width0, width1) + 1;
6069 result_low = MIN (low0, low1);
6070 break;
6071 case MINUS:
6072 result_low = MIN (low0, low1);
6073 break;
6074 case MULT:
6075 result_width = width0 + width1;
6076 result_low = low0 + low1;
6077 break;
6078 case DIV:
6079 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6080 result_width = width0;
6081 break;
6082 case UDIV:
6083 result_width = width0;
6084 break;
6085 case MOD:
6086 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6087 result_width = MIN (width0, width1);
6088 result_low = MIN (low0, low1);
6089 break;
6090 case UMOD:
6091 result_width = MIN (width0, width1);
6092 result_low = MIN (low0, low1);
6093 break;
6094 }
6095
6096 if (result_width < mode_width)
6097 significant &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6098
6099 if (result_low > 0)
6100 significant &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6101 }
6102 break;
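/* Worked example (illustrative masks): if SIG0 == 0x0c (WIDTH0 == 4,
   LOW0 == 2) and SIG1 == 0x30 (WIDTH1 == 6, LOW1 == 4), a MULT can
   have non-zero bits only in positions 6 through 9: RESULT_WIDTH ==
   10 and RESULT_LOW == 6.  */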
6103
6104 case ZERO_EXTRACT:
6105 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6106 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6107 significant &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6108 break;
6109
6110 case SUBREG:
6111 /* If this is a SUBREG formed for a promoted variable that has
6112 been zero-extended, we know that at least the high-order bits
6113 are zero, though others might be too. */
6114
6115 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6116 significant = (GET_MODE_MASK (GET_MODE (x))
6117 & significant_bits (SUBREG_REG (x), GET_MODE (x)));
6118
6119 /* If the inner mode is a single word for both the host and target
6120 machines, we can compute this from which bits of the inner
6121 object are known significant. */
6122 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
6123 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6124 <= HOST_BITS_PER_WIDE_INT))
6125 {
6126 significant &= significant_bits (SUBREG_REG (x), mode);
6127 #if ! defined(BYTE_LOADS_ZERO_EXTEND) && ! defined(BYTE_LOADS_SIGN_EXTEND)
6128 /* On many CISC machines, accessing an object in a wider mode
6129 causes the high-order bits to become undefined. So they are
6130 not known to be zero. */
6131 if (GET_MODE_SIZE (GET_MODE (x))
6132 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6133 significant |= (GET_MODE_MASK (GET_MODE (x))
6134 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
6135 #endif
6136 }
6137 break;
6138
6139 case ASHIFTRT:
6140 case LSHIFTRT:
6141 case ASHIFT:
6142 case LSHIFT:
6143 case ROTATE:
6144 /* The significant bits are in two classes: any bits within MODE
6145 that aren't in GET_MODE (x) are always significant. The rest of the
6146 significant bits are those that are significant in the operand of
6147 the shift when shifted the appropriate number of bits. This
6148 shows that high-order bits are cleared by the right shift and
6149 low-order bits by left shifts. */
6150 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6151 && INTVAL (XEXP (x, 1)) >= 0
6152 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6153 {
6154 enum machine_mode inner_mode = GET_MODE (x);
6155 int width = GET_MODE_BITSIZE (inner_mode);
6156 int count = INTVAL (XEXP (x, 1));
6157 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6158 unsigned HOST_WIDE_INT op_significant
6159 = significant_bits (XEXP (x, 0), mode);
6160 unsigned HOST_WIDE_INT inner = op_significant & mode_mask;
6161 unsigned HOST_WIDE_INT outer = 0;
6162
6163 if (mode_width > width)
6164 outer = (op_significant & significant & ~ mode_mask);
6165
6166 if (code == LSHIFTRT)
6167 inner >>= count;
6168 else if (code == ASHIFTRT)
6169 {
6170 inner >>= count;
6171
6172 /* If the sign bit was significant before the shift, we need
6173 to mark as significant all the places it could have been
6174 copied to by the shift. */
6175 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6176 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
6177 }
6178 else if (code == LSHIFT || code == ASHIFT)
6179 inner <<= count;
6180 else
6181 inner = ((inner << (count % width)
6182 | (inner >> (width - (count % width)))) & mode_mask);
6183
6184 significant &= (outer | inner);
6185 }
6186 break;
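/* Worked example (QImode, illustrative): for (ashiftrt:QI X
   (const_int 3)) with all bits of X possibly set, INNER == 0xff >> 3
   == 0x1f; since bit 4 (the shifted sign bit) is set, the three
   vacated high bits are marked as well, giving 0xff again.  */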
6187
6188 case FFS:
6189 /* This is at most the number of bits in the mode. */
6190 significant = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
6191 break;
6192
6193 case IF_THEN_ELSE:
6194 significant &= (significant_bits (XEXP (x, 1), mode)
6195 | significant_bits (XEXP (x, 2), mode));
6196 break;
6197 }
6198
6199 return significant;
6200 }
6201 \f
6202 /* Return the number of bits at the high-order end of X that are known to
6203 be equal to the sign bit. This number will always be between 1 and
6204 the number of bits in the mode of X. MODE is the mode to be used
6205 if X is VOIDmode. */
6206
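/* For example, in SImode (32 bits, illustrative): (sign_extend:SI
   (reg:QI ...)) yields at least 32 - 8 + 1 == 25 copies of the sign
   bit, and (ashiftrt:SI X (const_int 4)) yields at least five.  */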
6207 static int
6208 num_sign_bit_copies (x, mode)
6209 rtx x;
6210 enum machine_mode mode;
6211 {
6212 enum rtx_code code = GET_CODE (x);
6213 int bitwidth;
6214 int num0, num1, result;
6215 unsigned HOST_WIDE_INT sig;
6216 rtx tem;
6217
6218 /* If we weren't given a mode, use the mode of X. If the mode is still
6219 VOIDmode, we don't know anything. */
6220
6221 if (mode == VOIDmode)
6222 mode = GET_MODE (x);
6223
6224 if (mode == VOIDmode)
6225 return 0;
6226
6227 bitwidth = GET_MODE_BITSIZE (mode);
6228
6229 switch (code)
6230 {
6231 case REG:
6232 if (significant_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6233 return reg_sign_bit_copies[REGNO (x)];
6234
6235 tem = get_last_value (x);
6236 if (tem != 0)
6237 return num_sign_bit_copies (tem, mode);
6238 break;
6239
6240 #ifdef BYTE_LOADS_SIGN_EXTEND
6241 case MEM:
6242 /* Some RISC machines sign-extend all loads smaller than a word. */
6243 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
6244 #endif
6245
6246 case CONST_INT:
6247 /* If the constant is negative, take its 1's complement and remask.
6248 Then see how many zero bits we have. */
6249 sig = INTVAL (x) & GET_MODE_MASK (mode);
6250 if (sig & ((HOST_WIDE_INT) 1 << (bitwidth - 1)))
6251 sig = (~ sig) & GET_MODE_MASK (mode);
6252
6253 return (sig == 0 ? bitwidth : bitwidth - floor_log2 (sig) - 1);
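/* For example (QImode, illustrative): for (const_int -4), SIG ==
   0xfc; the sign bit is set, so SIG becomes ~0xfc & 0xff == 0x03
   and the result is 8 - floor_log2 (3) - 1 == 6, matching the six
   leading one bits of 11111100.  */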
6254
6255 case SUBREG:
6256 /* If this is a SUBREG for a promoted object that is sign-extended
6257 and we are looking at it in a wider mode, we know that at least the
6258 high-order bits are known to be sign bit copies. */
6259
6260 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
6261 return (GET_MODE_BITSIZE (mode) - GET_MODE_BITSIZE (GET_MODE (x))
6262 + num_sign_bit_copies (SUBREG_REG (x), GET_MODE (x)));
6263
6264 /* For a smaller object, just ignore the high bits. */
6265 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
6266 {
6267 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
6268 return MAX (1, (num0
6269 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6270 - bitwidth)));
6271 }
6272
6273 #if defined(BYTE_LOADS_ZERO_EXTEND) || defined(BYTE_LOADS_SIGN_EXTEND)
6274 /* For paradoxical SUBREGs, just look inside since, on machines with
6275 one of these defined, we assume that operations are actually
6276 performed on the full register. Note that we are passing MODE
6277 to the recursive call, so the number of sign bit copies will
6278 remain relative to that mode, not the inner mode. */
6279
6280 if (GET_MODE_SIZE (GET_MODE (x))
6281 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6282 return num_sign_bit_copies (SUBREG_REG (x), mode);
6283 #endif
6284
6285 break;
6286
6287 case SIGN_EXTRACT:
6288 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6289 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
6290 break;
6291
6292 case SIGN_EXTEND:
6293 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6294 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
6295
6296 case TRUNCATE:
6297 /* For a smaller object, just ignore the high bits. */
6298 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
6299 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6300 - bitwidth)));
6301
6302 case NOT:
6303 return num_sign_bit_copies (XEXP (x, 0), mode);
6304
6305 case ROTATE: case ROTATERT:
6306 /* If we are rotating left by a number of bits less than the number
6307 of sign bit copies, we can just subtract that amount from the
6308 number. */
6309 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6310 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
6311 {
6312 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6313 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
6314 : bitwidth - INTVAL (XEXP (x, 1))));
6315 }
6316 break;
6317
6318 case NEG:
6319 /* In general, this subtracts one sign bit copy. But if the value
6320 is known to be positive, the number of sign bit copies is the
6321 same as that of the input. Finally, if the input has just one
6322 significant bit, all the bits are copies of the sign bit. */
6323 sig = significant_bits (XEXP (x, 0), mode);
6324 if (sig == 1)
6325 return bitwidth;
6326
6327 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6328 if (num0 > 1
6329 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig))
6330 num0--;
6331
6332 return num0;
6333
6334 case IOR: case AND: case XOR:
6335 case SMIN: case SMAX: case UMIN: case UMAX:
6336 /* Logical operations will preserve the number of sign-bit copies.
6337 MIN and MAX operations always return one of the operands. */
6338 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6339 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6340 return MIN (num0, num1);
6341
6342 case PLUS: case MINUS:
6343 /* For addition and subtraction, we can have a 1-bit carry. However,
6344 if we are subtracting 1 from a positive number, there will not
6345 be such a carry. Furthermore, if the positive number is known to
6346 be 0 or 1, we know the result is either -1 or 0. */
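/* For example, in an 8-bit mode, 0x0f and 0x0f each have 4 sign bit
copies, but 0x0f + 0x0f == 0x1e has only 3, which is why we
subtract one for the carry below. */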
6347
6348 if (code == PLUS && XEXP (x, 1) == constm1_rtx
6349 /* Don't do this if XEXP (x, 0) is a paradoxical subreg
6350 because in principle we don't know what the high bits are. */
6351 && !(GET_CODE (XEXP (x, 0)) == SUBREG
6352 && (GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
6353 < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))))
6354 {
6355 sig = significant_bits (XEXP (x, 0), mode);
6356 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig) == 0)
6357 return (sig == 1 || sig == 0 ? bitwidth
6358 : bitwidth - floor_log2 (sig) - 1);
6359 }
6360
6361 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6362 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6363 return MAX (1, MIN (num0, num1) - 1);
6364
6365 case MULT:
6366 /* The number of bits of the product is the sum of the number of
6367 bits of both terms. However, unless one of the terms is known
6368 to be positive, we must allow for an additional bit since negating
6369 a negative number can remove one sign bit copy. */
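/* For example, in an 8-bit mode, -4 (0xfc) has 6 sign bit copies, so
two such terms give 8 - 2 - 2 == 4 copies for the product; but
(-4) * (-4) == 16 (0x10) has only 3, hence the extra bit allowed
when both terms may be negative. */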
6370
6371 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6372 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6373
6374 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
6375 if (result > 0
6376 && ((significant_bits (XEXP (x, 0), mode)
6377 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6378 && ((significant_bits (XEXP (x, 1), mode)
6379 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
6380 result--;
6381
6382 return MAX (1, result);
6383
6384 case UDIV:
6385 /* The result must be <= the first operand. */
6386 return num_sign_bit_copies (XEXP (x, 0), mode);
6387
6388 case UMOD:
6389 /* The result must be <= the second operand. */
6390 return num_sign_bit_copies (XEXP (x, 1), mode);
6391
6392 case DIV:
6393 /* Similar to unsigned division, except that we have to worry about
6394 the case where the divisor is negative, in which case we have
6395 to add 1. */
6396 result = num_sign_bit_copies (XEXP (x, 0), mode);
6397 if (result > 1
6398 && (significant_bits (XEXP (x, 1), mode)
6399 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6400 result--;
6401
6402 return result;
6403
6404 case MOD:
6405 result = num_sign_bit_copies (XEXP (x, 1), mode);
6406 if (result > 1
6407 && (significant_bits (XEXP (x, 1), mode)
6408 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6409 result--;
6410
6411 return result;
6412
6413 case ASHIFTRT:
6414 /* Shifts by a constant add to the number of bits equal to the
6415 sign bit. */
6416 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6417 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6418 && INTVAL (XEXP (x, 1)) > 0)
6419 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
6420
6421 return num0;
6422
6423 case ASHIFT:
6424 case LSHIFT:
6425 /* Left shifts destroy copies. */
6426 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6427 || INTVAL (XEXP (x, 1)) < 0
6428 || INTVAL (XEXP (x, 1)) >= bitwidth)
6429 return 1;
6430
6431 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6432 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
6433
6434 case IF_THEN_ELSE:
6435 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
6436 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
6437 return MIN (num0, num1);
6438
6439 #if STORE_FLAG_VALUE == -1
6440 case EQ: case NE: case GE: case GT: case LE: case LT:
6441 case GEU: case GTU: case LEU: case LTU:
6442 return bitwidth;
6443 #endif
6444 }
6445
6446 /* If we haven't been able to figure it out by one of the above rules,
6447 see if some of the high-order bits are known to be zero. If so,
6448 count those bits and return one less than that amount. */
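/* For example, in a 16-bit mode, if SIG is 0x00ff then floor_log2
gives 7, bits 15..8 are known to be zero, and we return
16 - 7 - 1 == 8 copies of the (zero) sign bit. */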
6449
6450 sig = significant_bits (x, mode);
6451 return sig == GET_MODE_MASK (mode) ? 1 : bitwidth - floor_log2 (sig) - 1;
6452 }
6453 \f
6454 /* Return the number of "extended" bits there are in X, when interpreted
6455 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
6456 unsigned quantities, this is the number of high-order zero bits.
6457 For signed quantities, this is the number of copies of the sign bit
6458 minus 1. In both cases, this function returns the number of "spare"
6459 bits. For example, if two quantities for which this function returns
6460 at least 1 are added, the addition is known not to overflow.
6461
6462 This function will always return 0 unless called during combine, which
6463 implies that it must be called from a define_split. */
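/* For example, a 16-bit value zero-extended from 8 bits has bits
15..8 known to be zero, so the unsigned count is 16 - 1 - 7 == 8;
a value sign-extended from 8 bits has at least 9 sign bit copies,
so the signed count is at least 8. */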
6464
6465 int
6466 extended_count (x, mode, unsignedp)
6467 rtx x;
6468 enum machine_mode mode;
6469 int unsignedp;
6470 {
6471 if (significant_valid == 0)
6472 return 0;
6473
6474 return (unsignedp
6475 ? (GET_MODE_BITSIZE (mode) - 1
6476 - floor_log2 (significant_bits (x, mode)))
6477 : num_sign_bit_copies (x, mode) - 1);
6478 }
6479 \f
6480 /* This function is called from `simplify_shift_const' to merge two
6481 outer operations. Specifically, we have already found that we need
6482 to perform operation *POP0 with constant *PCONST0 at the outermost
6483 position. We would now like to also perform OP1 with constant CONST1
6484 (with *POP0 being done last).
6485
6486 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
6487 the resulting operation. *PCOMP_P is set to 1 if we would need to
6488 complement the innermost operand, otherwise it is unchanged.
6489
6490 MODE is the mode in which the operation will be done. No bits outside
6491 the width of this mode matter. It is assumed that the width of this mode
6492 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
6493
6494 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
6495 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
6496 result is simply *PCONST0.
6497
6498 If the resulting operation cannot be expressed as one operation, we
6499 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
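/* For example, if *POP0 is IOR and OP1 is AND with the same constant
C, the identity ((X & C) | C) == C applies, so *POP0 becomes SET and
the result is simply the constant. Likewise ((X | C) ^ C) == (X & ~C),
so an outer XOR over an inner IOR collapses to a single AND. */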
6500
6501 static int
6502 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
6503 enum rtx_code *pop0;
6504 HOST_WIDE_INT *pconst0;
6505 enum rtx_code op1;
6506 HOST_WIDE_INT const1;
6507 enum machine_mode mode;
6508 int *pcomp_p;
6509 {
6510 enum rtx_code op0 = *pop0;
6511 HOST_WIDE_INT const0 = *pconst0;
6512
6513 const0 &= GET_MODE_MASK (mode);
6514 const1 &= GET_MODE_MASK (mode);
6515
6516 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6517 if (op0 == AND)
6518 const1 &= const0;
6519
6520 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6521 if OP0 is SET. */
6522
6523 if (op1 == NIL || op0 == SET)
6524 return 1;
6525
6526 else if (op0 == NIL)
6527 op0 = op1, const0 = const1;
6528
6529 else if (op0 == op1)
6530 {
6531 switch (op0)
6532 {
6533 case AND:
6534 const0 &= const1;
6535 break;
6536 case IOR:
6537 const0 |= const1;
6538 break;
6539 case XOR:
6540 const0 ^= const1;
6541 break;
6542 case PLUS:
6543 const0 += const1;
6544 break;
6545 case NEG:
6546 op0 = NIL;
6547 break;
6548 }
6549 }
6550
6551 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6552 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6553 return 0;
6554
6555 /* If the two constants aren't the same, we can't do anything. The
6556 remaining six cases can all be done. */
6557 else if (const0 != const1)
6558 return 0;
6559
6560 else
6561 switch (op0)
6562 {
6563 case IOR:
6564 if (op1 == AND)
6565 /* (a & b) | b == b */
6566 op0 = SET;
6567 else /* op1 == XOR */
6568 /* (a ^ b) | b == a | b */
6569 ;
6570 break;
6571
6572 case XOR:
6573 if (op1 == AND)
6574 /* (a & b) ^ b == (~a) & b */
6575 op0 = AND, *pcomp_p = 1;
6576 else /* op1 == IOR */
6577 /* (a | b) ^ b == a & ~b */
6578 op0 = AND, *pconst0 = ~ const0;
6579 break;
6580
6581 case AND:
6582 if (op1 == IOR)
6583 /* (a | b) & b == b */
6584 op0 = SET;
6585 else /* op1 == XOR */
6586 /* (a ^ b) & b == (~a) & b */
6587 *pcomp_p = 1;
6588 break;
6589 }
6590
6591 /* Check for NO-OP cases. */
6592 const0 &= GET_MODE_MASK (mode);
6593 if (const0 == 0
6594 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6595 op0 = NIL;
6596 else if (const0 == 0 && op0 == AND)
6597 op0 = SET;
6598 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6599 op0 = NIL;
6600
6601 *pop0 = op0;
6602 *pconst0 = const0;
6603
6604 return 1;
6605 }
6606 \f
6607 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6608 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6609 that we started with.
6610
6611 The shift is normally computed in the widest mode we find in VAROP, as
6612 long as it isn't a different number of words than RESULT_MODE. Exceptions
6613 are ASHIFTRT and ROTATE, which are always done in their original mode. */
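/* For example, (lshiftrt (lshiftrt X 2) 3) merges into
(lshiftrt X 5), and (lshiftrt (and X C) N) becomes
(and (lshiftrt X N) (C >> N)) by moving the AND outside as the
outer operation. */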
6614
6615 static rtx
6616 simplify_shift_const (x, code, result_mode, varop, count)
6617 rtx x;
6618 enum rtx_code code;
6619 enum machine_mode result_mode;
6620 rtx varop;
6621 int count;
6622 {
6623 enum rtx_code orig_code = code;
6624 int orig_count = count;
6625 enum machine_mode mode = result_mode;
6626 enum machine_mode shift_mode, tmode;
6627 int mode_words
6628 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6629 /* We form (outer_op (code varop count) (outer_const)). */
6630 enum rtx_code outer_op = NIL;
6631 HOST_WIDE_INT outer_const;
6632 rtx const_rtx;
6633 int complement_p = 0;
6634 rtx new;
6635
6636 /* If we were given an invalid count, don't do anything except exactly
6637 what was requested. */
6638
6639 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6640 {
6641 if (x)
6642 return x;
6643
6644 return gen_rtx (code, mode, varop, GEN_INT (count));
6645 }
6646
6647 /* Unless one of the branches of the `if' in this loop does a `continue',
6648 we will `break' the loop after the `if'. */
6649
6650 while (count != 0)
6651 {
6652 /* If we have an operand of (clobber (const_int 0)), just return that
6653 value. */
6654 if (GET_CODE (varop) == CLOBBER)
6655 return varop;
6656
6657 /* If we discovered we had to complement VAROP, leave. Making a NOT
6658 here would cause an infinite loop. */
6659 if (complement_p)
6660 break;
6661
6662 /* Convert ROTATERT to ROTATE. */
6663 if (code == ROTATERT)
6664 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
6665
6666 /* Canonicalize LSHIFT to ASHIFT. */
6667 if (code == LSHIFT)
6668 code = ASHIFT;
6669
6670 /* We need to determine what mode we will do the shift in. If the
6671 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
6672 was originally done in. Otherwise, we can do it in MODE, the widest
6673 mode encountered. */
6674 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6675
6676 /* Handle cases where the count is greater than the size of the mode
6677 minus 1. For ASHIFT, use the size minus one as the count (this can
6678 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6679 take the count modulo the size. For other shifts, the result is
6680 zero.
6681
6682 Since these shifts are being produced by the compiler by combining
6683 multiple operations, each of which is defined, we know what the
6684 result is supposed to be. */
6685
6686 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
6687 {
6688 if (code == ASHIFTRT)
6689 count = GET_MODE_BITSIZE (shift_mode) - 1;
6690 else if (code == ROTATE || code == ROTATERT)
6691 count %= GET_MODE_BITSIZE (shift_mode);
6692 else
6693 {
6694 /* We can't simply return zero because there may be an
6695 outer op. */
6696 varop = const0_rtx;
6697 count = 0;
6698 break;
6699 }
6700 }
6701
6702 /* Negative counts are invalid and should not have been made (a
6703 programmer-specified negative count should have been handled
6704 above). */
6705 else if (count < 0)
6706 abort ();
6707
6708 /* An arithmetic right shift of a quantity known to be -1 or 0
6709 is a no-op. */
6710 if (code == ASHIFTRT
6711 && (num_sign_bit_copies (varop, shift_mode)
6712 == GET_MODE_BITSIZE (shift_mode)))
6713 {
6714 count = 0;
6715 break;
6716 }
6717
6718 /* We simplify the tests below and elsewhere by converting
6719 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
6720 `make_compound_operation' will convert it to an ASHIFTRT for
6721 those machines (such as Vax) that don't have a LSHIFTRT. */
6722 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
6723 && code == ASHIFTRT
6724 && ((significant_bits (varop, shift_mode)
6725 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
6726 == 0))
6727 code = LSHIFTRT;
6728
6729 switch (GET_CODE (varop))
6730 {
6731 case SIGN_EXTEND:
6732 case ZERO_EXTEND:
6733 case SIGN_EXTRACT:
6734 case ZERO_EXTRACT:
6735 new = expand_compound_operation (varop);
6736 if (new != varop)
6737 {
6738 varop = new;
6739 continue;
6740 }
6741 break;
6742
6743 case MEM:
6744 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
6745 minus the width of a smaller mode, we can do this with a
6746 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
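/* For example, on a little-endian machine,
(lshiftrt:SI (mem:SI ADDR) 16) uses only the high half of the
word, so it can become (zero_extend:SI (mem:HI (plus ADDR 2))). */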
6747 if ((code == ASHIFTRT || code == LSHIFTRT)
6748 && ! mode_dependent_address_p (XEXP (varop, 0))
6749 && ! MEM_VOLATILE_P (varop)
6750 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6751 MODE_INT, 1)) != BLKmode)
6752 {
6753 #if BYTES_BIG_ENDIAN
6754 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
6755 #else
6756 new = gen_rtx (MEM, tmode,
6757 plus_constant (XEXP (varop, 0),
6758 count / BITS_PER_UNIT));
6759 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
6760 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
6761 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
6762 #endif
6763 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6764 : ZERO_EXTEND, mode, new);
6765 count = 0;
6766 continue;
6767 }
6768 break;
6769
6770 case USE:
6771 /* Similar to the case above, except that we can only do this if
6772 the resulting mode is the same as that of the underlying
6773 MEM; we must also adjust the address depending on the *bit*
6774 endianness because of the way bit-field extract insns are defined. */
6775 if ((code == ASHIFTRT || code == LSHIFTRT)
6776 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6777 MODE_INT, 1)) != BLKmode
6778 && tmode == GET_MODE (XEXP (varop, 0)))
6779 {
6780 #if BITS_BIG_ENDIAN
6781 new = XEXP (varop, 0);
6782 #else
6783 new = copy_rtx (XEXP (varop, 0));
6784 SUBST (XEXP (new, 0),
6785 plus_constant (XEXP (new, 0),
6786 count / BITS_PER_UNIT));
6787 #endif
6788
6789 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6790 : ZERO_EXTEND, mode, new);
6791 count = 0;
6792 continue;
6793 }
6794 break;
6795
6796 case SUBREG:
6797 /* If VAROP is a SUBREG, strip it as long as the inner operand has
6798 the same number of words as what we've seen so far. Then store
6799 the widest mode in MODE. */
6800 if (subreg_lowpart_p (varop)
6801 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6802 > GET_MODE_SIZE (GET_MODE (varop)))
6803 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6804 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6805 == mode_words))
6806 {
6807 varop = SUBREG_REG (varop);
6808 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
6809 mode = GET_MODE (varop);
6810 continue;
6811 }
6812 break;
6813
6814 case MULT:
6815 /* Some machines use MULT instead of ASHIFT because MULT
6816 is cheaper. But it is still better on those machines to
6817 merge two shifts into one. */
6818 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6819 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6820 {
6821 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
6822 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6823 continue;
6824 }
6825 break;
6826
6827 case UDIV:
6828 /* Similar, for when divides are cheaper. */
6829 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6830 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6831 {
6832 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6833 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6834 continue;
6835 }
6836 break;
6837
6838 case ASHIFTRT:
6839 /* If we are extracting just the sign bit of an arithmetic right
6840 shift, that shift is not needed. */
6841 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
6842 {
6843 varop = XEXP (varop, 0);
6844 continue;
6845 }
6846
6847 /* ... fall through ... */
6848
6849 case LSHIFTRT:
6850 case ASHIFT:
6851 case LSHIFT:
6852 case ROTATE:
6853 /* Here we have two nested shifts. The result is usually the
6854 AND of a new shift with a mask. We compute the result below. */
6855 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6856 && INTVAL (XEXP (varop, 1)) >= 0
6857 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
6858 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
6859 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6860 {
6861 enum rtx_code first_code = GET_CODE (varop);
6862 int first_count = INTVAL (XEXP (varop, 1));
6863 unsigned HOST_WIDE_INT mask;
6864 rtx mask_rtx;
6865 rtx inner;
6866
6867 if (first_code == LSHIFT)
6868 first_code = ASHIFT;
6869
6870 /* We have one common special case. We can't do any merging if
6871 the inner code is an ASHIFTRT of a smaller mode. However, if
6872 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
6873 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
6874 we can convert it to
6875 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
6876 This simplifies certain SIGN_EXTEND operations. */
6877 if (code == ASHIFT && first_code == ASHIFTRT
6878 && (GET_MODE_BITSIZE (result_mode)
6879 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
6880 {
6881 /* C3 has the low-order C1 bits zero. */
6882
6883 mask = (GET_MODE_MASK (mode)
6884 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
6885
6886 varop = simplify_and_const_int (NULL_RTX, result_mode,
6887 XEXP (varop, 0), mask);
6888 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
6889 varop, count);
6890 count = first_count;
6891 code = ASHIFTRT;
6892 continue;
6893 }
6894
6895 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
6896 than C1 high-order bits equal to the sign bit, we can convert
6897 this to either an ASHIFT or an ASHIFTRT depending on the
6898 two counts.
6899
6900 We cannot do this if VAROP's mode is not SHIFT_MODE. */
6901
6902 if (code == ASHIFTRT && first_code == ASHIFT
6903 && GET_MODE (varop) == shift_mode
6904 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
6905 > first_count))
6906 {
6907 count -= first_count;
6908 if (count < 0)
6909 count = - count, code = ASHIFT;
6910 varop = XEXP (varop, 0);
6911 continue;
6912 }
6913
6914 /* There are some cases we can't do. If CODE is ASHIFTRT,
6915 we can only do this if FIRST_CODE is also ASHIFTRT.
6916
6917 We can't do the case when CODE is ROTATE and FIRST_CODE is
6918 ASHIFTRT.
6919
6920 If the mode of this shift is not the mode of the outer shift,
6921 we can't do this if either shift is ASHIFTRT or ROTATE.
6922
6923 Finally, we can't do any of these if the mode is too wide
6924 unless the codes are the same.
6925
6926 Handle the case where the shift codes are the same
6927 first. */
6928
6929 if (code == first_code)
6930 {
6931 if (GET_MODE (varop) != result_mode
6932 && (code == ASHIFTRT || code == ROTATE))
6933 break;
6934
6935 count += first_count;
6936 varop = XEXP (varop, 0);
6937 continue;
6938 }
6939
6940 if (code == ASHIFTRT
6941 || (code == ROTATE && first_code == ASHIFTRT)
6942 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
6943 || (GET_MODE (varop) != result_mode
6944 && (first_code == ASHIFTRT || first_code == ROTATE
6945 || code == ROTATE)))
6946 break;
6947
6948 /* To compute the mask to apply after the shift, shift the
6949 significant bits of the inner shift the same way the
6950 outer shift will. */
6951
6952 mask_rtx = GEN_INT (significant_bits (varop, GET_MODE (varop)));
6953
6954 mask_rtx
6955 = simplify_binary_operation (code, result_mode, mask_rtx,
6956 GEN_INT (count));
6957
6958 /* Give up if we can't compute an outer operation to use. */
6959 if (mask_rtx == 0
6960 || GET_CODE (mask_rtx) != CONST_INT
6961 || ! merge_outer_ops (&outer_op, &outer_const, AND,
6962 INTVAL (mask_rtx),
6963 result_mode, &complement_p))
6964 break;
6965
6966 /* If the shifts are in the same direction, we add the
6967 counts. Otherwise, we subtract them. */
6968 if ((code == ASHIFTRT || code == LSHIFTRT)
6969 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
6970 count += first_count;
6971 else
6972 count -= first_count;
6973
6974 /* If COUNT is positive, the new shift is usually CODE,
6975 except for the two exceptions below, in which case it is
6976 FIRST_CODE. If the count is negative, FIRST_CODE should
6977 always be used. */
6978 if (count > 0
6979 && ((first_code == ROTATE && code == ASHIFT)
6980 || (first_code == ASHIFTRT && code == LSHIFTRT)))
6981 code = first_code;
6982 else if (count < 0)
6983 code = first_code, count = - count;
6984
6985 varop = XEXP (varop, 0);
6986 continue;
6987 }
6988
6989 /* If we have (A << B << C) for any shift, we can convert this to
6990 (A << C << B). This wins if A is a constant. Only try this if
6991 B is not a constant. */
6992
6993 else if (GET_CODE (varop) == code
6994 && GET_CODE (XEXP (varop, 1)) != CONST_INT
6995 && 0 != (new
6996 = simplify_binary_operation (code, mode,
6997 XEXP (varop, 0),
6998 GEN_INT (count))))
6999 {
7000 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7001 count = 0;
7002 continue;
7003 }
7004 break;
7005
7006 case NOT:
7007 /* Make this fit the case below. */
7008 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
7009 GEN_INT (GET_MODE_MASK (mode)));
7010 continue;
7011
7012 case IOR:
7013 case AND:
7014 case XOR:
7015 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7016 with C the size of VAROP - 1 and the shift is logical if
7017 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7018 we have an (le X 0) operation. If we have an arithmetic shift
7019 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7020 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7021
7022 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7023 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7024 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7025 && (code == LSHIFTRT || code == ASHIFTRT)
7026 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7027 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7028 {
7029 count = 0;
7030 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7031 const0_rtx);
7032
7033 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7034 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7035
7036 continue;
7037 }
7038
7039 /* If we have (shift (logical)), move the logical to the outside
7040 to allow it to possibly combine with another logical and the
7041 shift to combine with another shift. This also canonicalizes to
7042 what a ZERO_EXTRACT looks like. Also, some machines have
7043 (and (shift)) insns. */
7044
7045 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7046 && (new = simplify_binary_operation (code, result_mode,
7047 XEXP (varop, 1),
7048 GEN_INT (count))) != 0
7049 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7050 INTVAL (new), result_mode, &complement_p))
7051 {
7052 varop = XEXP (varop, 0);
7053 continue;
7054 }
7055
7056 /* If we can't do that, try to simplify the shift in each arm of the
7057 logical expression, make a new logical expression, and apply
7058 the inverse distributive law. */
7059 {
7060 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
7061 XEXP (varop, 0), count);
7062 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
7063 XEXP (varop, 1), count);
7064
7065 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
7066 varop = apply_distributive_law (varop);
7067
7068 count = 0;
7069 }
7070 break;
7071
7072 case EQ:
7073 /* Convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7074 says that the sign bit can be tested, FOO has mode MODE, C is
7075 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
7076 significant. */
7077 if (code == LSHIFT
7078 && XEXP (varop, 1) == const0_rtx
7079 && GET_MODE (XEXP (varop, 0)) == result_mode
7080 && count == GET_MODE_BITSIZE (result_mode) - 1
7081 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7082 && ((STORE_FLAG_VALUE
7083 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
7084 && significant_bits (XEXP (varop, 0), result_mode) == 1
7085 && merge_outer_ops (&outer_op, &outer_const, XOR,
7086 (HOST_WIDE_INT) 1, result_mode,
7087 &complement_p))
7088 {
7089 varop = XEXP (varop, 0);
7090 count = 0;
7091 continue;
7092 }
7093 break;
7094
7095 case NEG:
7096 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7097 than the number of bits in the mode is equivalent to A. */
7098 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7099 && significant_bits (XEXP (varop, 0), result_mode) == 1)
7100 {
7101 varop = XEXP (varop, 0);
7102 count = 0;
7103 continue;
7104 }
7105
7106 /* NEG commutes with ASHIFT since it is multiplication. Move the
7107 NEG outside to allow shifts to combine. */
7108 if (code == ASHIFT
7109 && merge_outer_ops (&outer_op, &outer_const, NEG,
7110 (HOST_WIDE_INT) 0, result_mode,
7111 &complement_p))
7112 {
7113 varop = XEXP (varop, 0);
7114 continue;
7115 }
7116 break;
7117
7118 case PLUS:
7119 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7120 is one less than the number of bits in the mode is
7121 equivalent to (xor A 1). */
7122 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7123 && XEXP (varop, 1) == constm1_rtx
7124 && significant_bits (XEXP (varop, 0), result_mode) == 1
7125 && merge_outer_ops (&outer_op, &outer_const, XOR,
7126 (HOST_WIDE_INT) 1, result_mode,
7127 &complement_p))
7128 {
7129 count = 0;
7130 varop = XEXP (varop, 0);
7131 continue;
7132 }
7133
7134 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
7135 significant in BAR are those being shifted out and those
7136 bits are known zero in FOO, we can replace the PLUS with FOO.
7137 Similarly in the other operand order. This code occurs when
7138 we are computing the size of a variable-size array. */
7139
7140 if ((code == ASHIFTRT || code == LSHIFTRT)
7141 && count < HOST_BITS_PER_WIDE_INT
7142 && significant_bits (XEXP (varop, 1), result_mode) >> count == 0
7143 && (significant_bits (XEXP (varop, 1), result_mode)
7144 & significant_bits (XEXP (varop, 0), result_mode)) == 0)
7145 {
7146 varop = XEXP (varop, 0);
7147 continue;
7148 }
7149 else if ((code == ASHIFTRT || code == LSHIFTRT)
7150 && count < HOST_BITS_PER_WIDE_INT
7151 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
7152 >> count)
7153 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
7154 & significant_bits (XEXP (varop, 1),
7155 result_mode)))
7156 {
7157 varop = XEXP (varop, 1);
7158 continue;
7159 }
7160
7161 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7162 if (code == ASHIFT
7163 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7164 && (new = simplify_binary_operation (ASHIFT, result_mode,
7165 XEXP (varop, 1),
7166 GEN_INT (count))) != 0
7167 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7168 INTVAL (new), result_mode, &complement_p))
7169 {
7170 varop = XEXP (varop, 0);
7171 continue;
7172 }
7173 break;
7174
7175 case MINUS:
7176 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
7177 with C the size of VAROP - 1 and the shift is logical if
7178 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7179 we have a (gt X 0) operation. If the shift is arithmetic with
7180 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7181 we have a (neg (gt X 0)) operation. */
7182
7183 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7184 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7185 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7186 && (code == LSHIFTRT || code == ASHIFTRT)
7187 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7188 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7189 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7190 {
7191 count = 0;
7192 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7193 const0_rtx);
7194
7195 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7196 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7197
7198 continue;
7199 }
7200 break;
7201 }
7202
7203 break;
7204 }
7205
7206 /* We need to determine what mode to do the shift in. If the shift is
7207 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7208 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7209 The code we care about is that of the shift that will actually be done,
7210 not the shift that was originally requested. */
7211 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7212
7213 /* We have now finished analyzing the shift. The result should be
7214 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7215 OUTER_OP is non-NIL, it is an operation that needs to be applied
7216 to the result of the shift. OUTER_CONST is the relevant constant,
7217 but we must turn off all bits turned off in the shift.
7218
7219 If we were passed a value for X, see if we can use any pieces of
7220 it. If not, make a new rtx. */
7221
7222 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7223 && GET_CODE (XEXP (x, 1)) == CONST_INT
7224 && INTVAL (XEXP (x, 1)) == count)
7225 const_rtx = XEXP (x, 1);
7226 else
7227 const_rtx = GEN_INT (count);
7228
7229 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7230 && GET_MODE (XEXP (x, 0)) == shift_mode
7231 && SUBREG_REG (XEXP (x, 0)) == varop)
7232 varop = XEXP (x, 0);
7233 else if (GET_MODE (varop) != shift_mode)
7234 varop = gen_lowpart_for_combine (shift_mode, varop);
7235
7236 /* If we can't make the SUBREG, try to return what we were given. */
7237 if (GET_CODE (varop) == CLOBBER)
7238 return x ? x : varop;
7239
7240 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
7241 if (new != 0)
7242 x = new;
7243 else
7244 {
7245 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
7246 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
7247
7248 SUBST (XEXP (x, 0), varop);
7249 SUBST (XEXP (x, 1), const_rtx);
7250 }
7251
7252 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
7253 turn off all the bits that the shift would have turned off. */
7254 if (orig_code == LSHIFTRT && result_mode != shift_mode)
7255 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
7256 GET_MODE_MASK (result_mode) >> orig_count);
7257
7258 /* Do the remainder of the processing in RESULT_MODE. */
7259 x = gen_lowpart_for_combine (result_mode, x);
7260
7261 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
7262 operation. */
7263 if (complement_p)
7264 x = gen_unary (NOT, result_mode, x);
7265
7266 if (outer_op != NIL)
7267 {
7268 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7269 outer_const &= GET_MODE_MASK (result_mode);
7270
7271 if (outer_op == AND)
7272 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
7273 else if (outer_op == SET)
7274 /* This means that we have determined that the result is
7275 equivalent to a constant. This should be rare. */
7276 x = GEN_INT (outer_const);
7277 else if (GET_RTX_CLASS (outer_op) == '1')
7278 x = gen_unary (outer_op, result_mode, x);
7279 else
7280 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
7281 }
7282
7283 return x;
7284 }
7285 \f
7286 /* Like recog, but we receive the address of a pointer to a new pattern.
7287 We try to match the rtx that the pointer points to.
7288 If that fails, we may try to modify or replace the pattern,
7289 storing the replacement into the same pointer object.
7290
7291 Modifications include deletion or addition of CLOBBERs.
7292
7293 PNOTES is a pointer to a location where any REG_UNUSED notes added for
7294 the CLOBBERs are placed.
7295
7296 The value is the final insn code from the pattern ultimately matched,
7297 or -1. */
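/* For example, if the combined pattern is (parallel [(set A B)
(clobber R)]) and does not match, we retry with just (set A B);
conversely, if the match needs extra clobbers (say, of a
condition-code register), we add them, provided the clobbered
registers are dead at INSN. */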
7298
7299 static int
7300 recog_for_combine (pnewpat, insn, pnotes)
7301 rtx *pnewpat;
7302 rtx insn;
7303 rtx *pnotes;
7304 {
7305 register rtx pat = *pnewpat;
7306 int insn_code_number;
7307 int num_clobbers_to_add = 0;
7308 int i;
7309 rtx notes = 0;
7310
7311 /* Is the result of combination a valid instruction? */
7312 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7313
7314 /* If it isn't, there is the possibility that we previously had an insn
7315 that clobbered some register as a side effect, but the combined
7316 insn doesn't need to do that. So try once more without the clobbers
7317 unless this represents an ASM insn. */
7318
7319 if (insn_code_number < 0 && ! check_asm_operands (pat)
7320 && GET_CODE (pat) == PARALLEL)
7321 {
7322 int pos;
7323
7324 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
7325 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
7326 {
7327 if (i != pos)
7328 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
7329 pos++;
7330 }
7331
7332 SUBST_INT (XVECLEN (pat, 0), pos);
7333
7334 if (pos == 1)
7335 pat = XVECEXP (pat, 0, 0);
7336
7337 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7338 }
7339
7340 /* If we had any clobbers to add, make a new pattern that contains
7341 them. Then check to make sure that all of them are dead. */
7342 if (num_clobbers_to_add)
7343 {
7344 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
7345 gen_rtvec (GET_CODE (pat) == PARALLEL
7346 ? XVECLEN (pat, 0) + num_clobbers_to_add
7347 : num_clobbers_to_add + 1));
7348
7349 if (GET_CODE (pat) == PARALLEL)
7350 for (i = 0; i < XVECLEN (pat, 0); i++)
7351 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
7352 else
7353 XVECEXP (newpat, 0, 0) = pat;
7354
7355 add_clobbers (newpat, insn_code_number);
7356
7357 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
7358 i < XVECLEN (newpat, 0); i++)
7359 {
7360 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
7361 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
7362 return -1;
7363 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
7364 XEXP (XVECEXP (newpat, 0, i), 0), notes);
7365 }
7366 pat = newpat;
7367 }
7368
7369 *pnewpat = pat;
7370 *pnotes = notes;
7371
7372 return insn_code_number;
7373 }
7374 \f
7375 /* Like gen_lowpart but for use by combine. In combine it is not possible
7376 to create any new pseudoregs. However, it is safe to create
7377 invalid memory addresses, because combine will try to recognize
7378 them and all they will do is make the combine attempt fail.
7379
7380 If for some reason this cannot do its job, an rtx
7381 (clobber (const_int 0)) is returned.
7382 An insn containing that will not be recognized. */
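/* For example, asking for the QImode low part of (reg:SI N) normally
yields (subreg:QI (reg:SI N) 0), while asking for something wider
than a word yields (clobber (const_int 0)), so the caller's match
simply fails. */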
7383
7384 #undef gen_lowpart
7385
7386 static rtx
7387 gen_lowpart_for_combine (mode, x)
7388 enum machine_mode mode;
7389 register rtx x;
7390 {
7391 rtx result;
7392
7393 if (GET_MODE (x) == mode)
7394 return x;
7395
7396 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
7397 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7398
7399 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
7400 won't know what to do. So we will strip off the SUBREG here and
7401 process normally. */
7402 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
7403 {
7404 x = SUBREG_REG (x);
7405 if (GET_MODE (x) == mode)
7406 return x;
7407 }
7408
7409 result = gen_lowpart_common (mode, x);
7410 if (result)
7411 return result;
7412
7413 if (GET_CODE (x) == MEM)
7414 {
7415 register int offset = 0;
7416 rtx new;
7417
7418 /* Refuse to work on a volatile memory ref or one with a mode-dependent
7419 address. */
7420 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
7421 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7422
7423 /* If we want to refer to something bigger than the original memref,
7424 generate a perverse subreg instead. That will force a reload
7425 of the original memref X. */
7426 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
7427 return gen_rtx (SUBREG, mode, x, 0);
7428
7429 #if WORDS_BIG_ENDIAN
7430 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
7431 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
7432 #endif
7433 #if BYTES_BIG_ENDIAN
7434 /* Adjust the address so that the address-after-the-data
7435 is unchanged. */
7436 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
7437 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
7438 #endif
7439 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
7440 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
7441 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
7442 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
7443 return new;
7444 }
7445
7446 /* If X is a comparison operator, rewrite it in a new mode. This
7447 probably won't match, but may allow further simplifications. */
7448 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
7449 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
7450
7451 /* If we couldn't simplify X any other way, just enclose it in a
7452 SUBREG. Normally, this SUBREG won't match, but some patterns may
7453 include an explicit SUBREG or we may simplify it further in combine. */
7454 else
7455 {
7456 int word = 0;
7457
7458 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
7459 word = ((GET_MODE_SIZE (GET_MODE (x))
7460 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
7461 / UNITS_PER_WORD);
7462 return gen_rtx (SUBREG, mode, x, word);
7463 }
7464 }
7465 \f
7466 /* Make an rtx expression. This is a subset of gen_rtx and only supports
7467 expressions of 1, 2, or 3 operands, each of which is an rtx expression.
7468
7469 If the identical expression was previously in the insn (in the undobuf),
7470 it will be returned. Only if it is not found will a new expression
7471 be made. */
7472
7473 /*VARARGS2*/
7474 static rtx
7475 gen_rtx_combine (va_alist)
7476 va_dcl
7477 {
7478 va_list p;
7479 enum rtx_code code;
7480 enum machine_mode mode;
7481 int n_args;
7482 rtx args[3];
7483 int i, j;
7484 char *fmt;
7485 rtx rt;
7486
7487 va_start (p);
7488 code = va_arg (p, enum rtx_code);
7489 mode = va_arg (p, enum machine_mode);
7490 n_args = GET_RTX_LENGTH (code);
7491 fmt = GET_RTX_FORMAT (code);
7492
7493 if (n_args == 0 || n_args > 3)
7494 abort ();
7495
7496 /* Get each arg and verify that it is supposed to be an expression. */
7497 for (j = 0; j < n_args; j++)
7498 {
7499 if (*fmt++ != 'e')
7500 abort ();
7501
7502 args[j] = va_arg (p, rtx);
7503 }
7504
7505 /* See if this is in undobuf. Be sure we don't use objects that came
7506 from another insn; this could produce circular rtl structures. */
7507
7508 for (i = previous_num_undos; i < undobuf.num_undo; i++)
7509 if (!undobuf.undo[i].is_int
7510 && GET_CODE (undobuf.undo[i].old_contents.rtx) == code
7511 && GET_MODE (undobuf.undo[i].old_contents.rtx) == mode)
7512 {
7513 for (j = 0; j < n_args; j++)
7514 if (XEXP (undobuf.undo[i].old_contents.rtx, j) != args[j])
7515 break;
7516
7517 if (j == n_args)
7518 return undobuf.undo[i].old_contents.rtx;
7519 }
7520
7521 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7522 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7523 rt = rtx_alloc (code);
7524 PUT_MODE (rt, mode);
7525 XEXP (rt, 0) = args[0];
7526 if (n_args > 1)
7527 {
7528 XEXP (rt, 1) = args[1];
7529 if (n_args > 2)
7530 XEXP (rt, 2) = args[2];
7531 }
7532 return rt;
7533 }
7534
7535 /* These routines make binary and unary operations by first seeing if they
7536 fold; if not, a new expression is allocated. */
7537
7538 static rtx
7539 gen_binary (code, mode, op0, op1)
7540 enum rtx_code code;
7541 enum machine_mode mode;
7542 rtx op0, op1;
7543 {
7544 rtx result;
7545 rtx tem;
7546
7547 if (GET_RTX_CLASS (code) == 'c'
7548 && (GET_CODE (op0) == CONST_INT
7549 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
7550 tem = op0, op0 = op1, op1 = tem;
7551
7552 if (GET_RTX_CLASS (code) == '<')
7553 {
7554 enum machine_mode op_mode = GET_MODE (op0);
7555 if (op_mode == VOIDmode)
7556 op_mode = GET_MODE (op1);
7557 result = simplify_relational_operation (code, op_mode, op0, op1);
7558 }
7559 else
7560 result = simplify_binary_operation (code, mode, op0, op1);
7561
7562 if (result)
7563 return result;
7564
7565 /* Put complex operands first and constants second. */
7566 if (GET_RTX_CLASS (code) == 'c'
7567 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
7568 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
7569 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
7570 || (GET_CODE (op0) == SUBREG
7571 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
7572 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
7573 return gen_rtx_combine (code, mode, op1, op0);
7574
7575 return gen_rtx_combine (code, mode, op0, op1);
7576 }
7577
7578 static rtx
7579 gen_unary (code, mode, op0)
7580 enum rtx_code code;
7581 enum machine_mode mode;
7582 rtx op0;
7583 {
7584 rtx result = simplify_unary_operation (code, mode, op0, mode);
7585
7586 if (result)
7587 return result;
7588
7589 return gen_rtx_combine (code, mode, op0);
7590 }
7591 \f
7592 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
7593 comparison code that will be tested.
7594
7595 The result is a possibly different comparison code to use. *POP0 and
7596 *POP1 may be updated.
7597
7598 It is possible that we might detect that a comparison is either always
7599 true or always false. However, we do not perform general constant
7600 folding in combine, so this knowledge isn't useful. Such tautologies
7601 should have been detected earlier. Hence we ignore all such cases. */
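/* For example, (eq (plus X (const_int 3)) (const_int 7)) becomes
(eq X (const_int 4)) via the PLUS case below, and in a 32-bit mode
(ltu X 0x80000000) becomes (ge X 0), since an unsigned comparison
against the sign-bit constant is really a sign test. */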
7602
7603 static enum rtx_code
7604 simplify_comparison (code, pop0, pop1)
7605 enum rtx_code code;
7606 rtx *pop0;
7607 rtx *pop1;
7608 {
7609 rtx op0 = *pop0;
7610 rtx op1 = *pop1;
7611 rtx tem, tem1;
7612 int i;
7613 enum machine_mode mode, tmode;
7614
7615 /* Try a few ways of applying the same transformation to both operands. */
7616 while (1)
7617 {
7618 /* If both operands are the same constant shift, see if we can ignore the
7619 shift. We can if the shift is a rotate or if the bits shifted out of
7620 this shift are not significant for either input and if the type of
7621 comparison is compatible with the shift. */
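/* For example, (eq (lshiftrt A 2) (lshiftrt B 2)) reduces to
(eq A B) when the low two bits of both A and B are known to
be zero. */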
7622 if (GET_CODE (op0) == GET_CODE (op1)
7623 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7624 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
7625 || ((GET_CODE (op0) == LSHIFTRT
7626 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7627 && (code != GT && code != LT && code != GE && code != LE))
7628 || (GET_CODE (op0) == ASHIFTRT
7629 && (code != GTU && code != LTU
7630 && code != GEU && code != LEU)))
7631 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7632 && INTVAL (XEXP (op0, 1)) >= 0
7633 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7634 && XEXP (op0, 1) == XEXP (op1, 1))
7635 {
7636 enum machine_mode mode = GET_MODE (op0);
7637 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7638 int shift_count = INTVAL (XEXP (op0, 1));
7639
7640 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
7641 mask &= (mask >> shift_count) << shift_count;
7642 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7643 mask = (mask & (mask << shift_count)) >> shift_count;
7644
7645 if ((significant_bits (XEXP (op0, 0), mode) & ~ mask) == 0
7646 && (significant_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
7647 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
7648 else
7649 break;
7650 }
7651
7652 /* If both operands are AND's of a paradoxical SUBREG by constant, the
7653 SUBREGs are of the same mode, and, in both cases, the AND would
7654 be redundant if the comparison was done in the narrower mode,
7655 do the comparison in the narrower mode (e.g., we are AND'ing with 1
7656 and the operand's significant bits are 0xffffff01; in that case if
7657 we only care about QImode, we don't need the AND). This case occurs
7658 if the output mode of an scc insn is not SImode and
7659 STORE_FLAG_VALUE == 1 (e.g., the 386). */
7660
7661 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
7662 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7663 && GET_CODE (XEXP (op1, 1)) == CONST_INT
7664 && GET_CODE (XEXP (op0, 0)) == SUBREG
7665 && GET_CODE (XEXP (op1, 0)) == SUBREG
7666 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
7667 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
7668 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
7669 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7670 && (significant_bits (SUBREG_REG (XEXP (op0, 0)),
7671 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7672 & ~ INTVAL (XEXP (op0, 1))) == 0
7673 && (significant_bits (SUBREG_REG (XEXP (op1, 0)),
7674 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7675 & ~ INTVAL (XEXP (op1, 1))) == 0)
7676 {
7677 op0 = SUBREG_REG (XEXP (op0, 0));
7678 op1 = SUBREG_REG (XEXP (op1, 0));
7679
7680 /* The resulting comparison is always unsigned since we masked off
7681 the original sign bit. */
7682 code = unsigned_condition (code);
7683 }
7684 else
7685 break;
7686 }
7687
7688 /* If the first operand is a constant, swap the operands and adjust the
7689 comparison code appropriately. */
7690 if (CONSTANT_P (op0))
7691 {
7692 tem = op0, op0 = op1, op1 = tem;
7693 code = swap_condition (code);
7694 }
7695
7696 /* We now enter a loop during which we will try to simplify the comparison.
7697 For the most part, we are only concerned with comparisons with zero,
7698 but some things may really be comparisons with zero but not start
7699 out looking that way. */
7700
7701 while (GET_CODE (op1) == CONST_INT)
7702 {
7703 enum machine_mode mode = GET_MODE (op0);
7704 int mode_width = GET_MODE_BITSIZE (mode);
7705 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7706 int equality_comparison_p;
7707 int sign_bit_comparison_p;
7708 int unsigned_comparison_p;
7709 HOST_WIDE_INT const_op;
7710
7711 /* We only want to handle integral modes. This catches VOIDmode,
7712 CCmode, and the floating-point modes. An exception is that we
7713 can handle VOIDmode if OP0 is a COMPARE or a comparison
7714 operation. */
7715
7716 if (GET_MODE_CLASS (mode) != MODE_INT
7717 && ! (mode == VOIDmode
7718 && (GET_CODE (op0) == COMPARE
7719 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
7720 break;
7721
7722 /* Get the constant we are comparing against and turn off all bits
7723 not on in our mode. */
7724 const_op = INTVAL (op1);
7725 if (mode_width <= HOST_BITS_PER_WIDE_INT)
7726 const_op &= mask;
7727
7728 /* If we are comparing against a constant power of two and the value
7729 being compared has only that single significant bit (e.g., it was
7730 `and'ed with that bit), we can replace this with a comparison
7731 with zero. */
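/* For example, if OP0 is (and X 8), its only significant bit is the
8 bit, so (eq OP0 8) becomes (ne OP0 0) and (ne OP0 8) becomes
(eq OP0 0). */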
7732 if (const_op
7733 && (code == EQ || code == NE || code == GE || code == GEU
7734 || code == LT || code == LTU)
7735 && mode_width <= HOST_BITS_PER_WIDE_INT
7736 && exact_log2 (const_op) >= 0
7737 && significant_bits (op0, mode) == const_op)
7738 {
7739 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
7740 op1 = const0_rtx, const_op = 0;
7741 }
7742
7743 /* Similarly, if we are comparing a value known to be either -1 or
7744 0 with -1, change it to the opposite comparison against zero. */
7745
7746 if (const_op == -1
7747 && (code == EQ || code == NE || code == GT || code == LE
7748 || code == GEU || code == LTU)
7749 && num_sign_bit_copies (op0, mode) == mode_width)
7750 {
7751 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
7752 op1 = const0_rtx, const_op = 0;
7753 }
7754
7755 /* Do some canonicalizations based on the comparison code. We prefer
7756 comparisons against zero and then prefer equality comparisons.
7757 If we can reduce the size of a constant, we will do that too. */
7758
7759 switch (code)
7760 {
7761 case LT:
7762 /* < C is equivalent to <= (C - 1) */
7763 if (const_op > 0)
7764 {
7765 const_op -= 1;
7766 op1 = GEN_INT (const_op);
7767 code = LE;
7768 /* ... fall through to LE case below. */
7769 }
7770 else
7771 break;
7772
7773 case LE:
7774 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
7775 if (const_op < 0)
7776 {
7777 const_op += 1;
7778 op1 = GEN_INT (const_op);
7779 code = LT;
7780 }
7781
7782 /* If we are doing a <= 0 comparison on a value known to have
7783 a zero sign bit, we can replace this with == 0. */
7784 else if (const_op == 0
7785 && mode_width <= HOST_BITS_PER_WIDE_INT
7786 && (significant_bits (op0, mode)
7787 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7788 code = EQ;
7789 break;
7790
7791 case GE:
7792 /* >= C is equivalent to > (C - 1). */
7793 if (const_op > 0)
7794 {
7795 const_op -= 1;
7796 op1 = GEN_INT (const_op);
7797 code = GT;
7798 /* ... fall through to GT below. */
7799 }
7800 else
7801 break;
7802
7803 case GT:
7804 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
7805 if (const_op < 0)
7806 {
7807 const_op += 1;
7808 op1 = GEN_INT (const_op);
7809 code = GE;
7810 }
7811
7812 /* If we are doing a > 0 comparison on a value known to have
7813 a zero sign bit, we can replace this with != 0. */
7814 else if (const_op == 0
7815 && mode_width <= HOST_BITS_PER_WIDE_INT
7816 && (significant_bits (op0, mode)
7817 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7818 code = NE;
7819 break;
7820
7821 case LTU:
7822 /* < C is equivalent to <= (C - 1). */
7823 if (const_op > 0)
7824 {
7825 const_op -= 1;
7826 op1 = GEN_INT (const_op);
7827 code = LEU;
7828 /* ... fall through ... */
7829 }
7830
7831 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
7832 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7833 {
7834 const_op = 0, op1 = const0_rtx;
7835 code = GE;
7836 break;
7837 }
7838 else
7839 break;
7840
7841 case LEU:
7842 /* unsigned <= 0 is equivalent to == 0 */
7843 if (const_op == 0)
7844 code = EQ;
7845
7846 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
7847 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7848 {
7849 const_op = 0, op1 = const0_rtx;
7850 code = GE;
7851 }
7852 break;
7853
7854 case GEU:
7855 /* unsigned >= C is equivalent to > (C - 1). */
7856 if (const_op > 1)
7857 {
7858 const_op -= 1;
7859 op1 = GEN_INT (const_op);
7860 code = GTU;
7861 /* ... fall through ... */
7862 }
7863
7864 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
7865 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7866 {
7867 const_op = 0, op1 = const0_rtx;
7868 code = LT;
7869 }
7870 else
7871 break;
7872
7873 case GTU:
7874 /* unsigned > 0 is equivalent to != 0 */
7875 if (const_op == 0)
7876 code = NE;
7877
7878 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
7879 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7880 {
7881 const_op = 0, op1 = const0_rtx;
7882 code = LT;
7883 }
7884 break;
7885 }
7886
7887 /* Compute some predicates to simplify code below. */
7888
7889 equality_comparison_p = (code == EQ || code == NE);
7890 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
7891 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
7892 || code == GEU);
7893
7894 /* Now try cases based on the opcode of OP0. If none of the cases
7895 does a "continue", we exit this loop immediately after the
7896 switch. */
7897
7898 switch (GET_CODE (op0))
7899 {
7900 case ZERO_EXTRACT:
7901 /* If we are extracting a single bit from a variable position in
7902 a constant that has only a single bit set and are comparing it
7903 with zero, we can convert this into an equality comparison
7904 between the position and the location of the single bit. We can't
7905 do this if bits are big-endian and we don't have an extzv, since
7906 we then can't know what mode to use for the endianness adjustment. */
7907
7908 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
7909 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
7910 && XEXP (op0, 1) == const1_rtx
7911 && equality_comparison_p && const_op == 0
7912 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
7913 {
7914 #if BITS_BIG_ENDIAN
7915 i = (GET_MODE_BITSIZE
7916 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
7917 #endif
7918
7919 op0 = XEXP (op0, 2);
7920 op1 = GEN_INT (i);
7921 const_op = i;
7922
7923 /* Result is nonzero iff shift count is equal to I. */
7924 code = reverse_condition (code);
7925 continue;
7926 }
7927 #endif
7928
7929 /* ... fall through ... */
7930
7931 case SIGN_EXTRACT:
7932 tem = expand_compound_operation (op0);
7933 if (tem != op0)
7934 {
7935 op0 = tem;
7936 continue;
7937 }
7938 break;
7939
7940 case NOT:
7941 /* If testing for equality, we can take the NOT of the constant. */
7942 if (equality_comparison_p
7943 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
7944 {
7945 op0 = XEXP (op0, 0);
7946 op1 = tem;
7947 continue;
7948 }
7949
7950 /* If just looking at the sign bit, reverse the sense of the
7951 comparison. */
7952 if (sign_bit_comparison_p)
7953 {
7954 op0 = XEXP (op0, 0);
7955 code = (code == GE ? LT : GE);
7956 continue;
7957 }
7958 break;
7959
7960 case NEG:
7961 /* If testing for equality, we can take the NEG of the constant. */
7962 if (equality_comparison_p
7963 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
7964 {
7965 op0 = XEXP (op0, 0);
7966 op1 = tem;
7967 continue;
7968 }
7969
7970 /* The remaining cases only apply to comparisons with zero. */
7971 if (const_op != 0)
7972 break;
7973
7974 /* When X is ABS or is known positive,
7975 (neg X) is < 0 if and only if X != 0. */
7976
7977 if (sign_bit_comparison_p
7978 && (GET_CODE (XEXP (op0, 0)) == ABS
7979 || (mode_width <= HOST_BITS_PER_WIDE_INT
7980 && (significant_bits (XEXP (op0, 0), mode)
7981 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
7982 {
7983 op0 = XEXP (op0, 0);
7984 code = (code == LT ? NE : EQ);
7985 continue;
7986 }
7987
7988 /* If we have NEG of something that is the result of a
7989 SIGN_EXTEND, SIGN_EXTRACT, or ASHIFTRT, we know that the
7990 two high-order bits must be the same and hence that
7991 "(-a) < 0" is equivalent to "a > 0". Otherwise, we can't
7992 do this. */
7993 if (GET_CODE (XEXP (op0, 0)) == SIGN_EXTEND
7994 || (GET_CODE (XEXP (op0, 0)) == SIGN_EXTRACT
7995 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7996 && (INTVAL (XEXP (XEXP (op0, 0), 1))
7997 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (op0, 0), 0)))))
7998 || (GET_CODE (XEXP (op0, 0)) == ASHIFTRT
7999 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8000 && XEXP (XEXP (op0, 0), 1) != const0_rtx)
8001 || ((tem = get_last_value (XEXP (op0, 0))) != 0
8002 && (GET_CODE (tem) == SIGN_EXTEND
8003 || (GET_CODE (tem) == SIGN_EXTRACT
8004 && GET_CODE (XEXP (tem, 1)) == CONST_INT
8005 && (INTVAL (XEXP (tem, 1))
8006 < GET_MODE_BITSIZE (GET_MODE (XEXP (tem, 0)))))
8007 || (GET_CODE (tem) == ASHIFTRT
8008 && GET_CODE (XEXP (tem, 1)) == CONST_INT
8009 && XEXP (tem, 1) != const0_rtx))))
8010 {
8011 op0 = XEXP (op0, 0);
8012 code = swap_condition (code);
8013 continue;
8014 }
8015 break;
8016
8017 case ROTATE:
8018 /* If we are testing equality and our count is a constant, we
8019 can perform the inverse operation on our RHS. */
8020 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8021 && (tem = simplify_binary_operation (ROTATERT, mode,
8022 op1, XEXP (op0, 1))) != 0)
8023 {
8024 op0 = XEXP (op0, 0);
8025 op1 = tem;
8026 continue;
8027 }
8028
8029 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8030 a particular bit. Convert it to an AND of a constant of that
8031 bit. This will be converted into a ZERO_EXTRACT. */
8032 if (const_op == 0 && sign_bit_comparison_p
8033 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8034 && mode_width <= HOST_BITS_PER_WIDE_INT)
8035 {
8036 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8037 ((HOST_WIDE_INT) 1
8038 << (mode_width - 1
8039 - INTVAL (XEXP (op0, 1)))));
8040 code = (code == LT ? NE : EQ);
8041 continue;
8042 }
8043
8044 /* ... fall through ... */
8045
8046 case ABS:
8047 /* ABS is ignorable inside an equality comparison with zero. */
8048 if (const_op == 0 && equality_comparison_p)
8049 {
8050 op0 = XEXP (op0, 0);
8051 continue;
8052 }
8053 break;
8054
8055
8056 case SIGN_EXTEND:
8057 /* Can simplify (compare (zero/sign_extend FOO) CONST)
8058 to (compare FOO CONST) if CONST fits in FOO's mode and we
8059 are either testing inequality or have an unsigned comparison
8060 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
8061 if (! unsigned_comparison_p
8062 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8063 <= HOST_BITS_PER_WIDE_INT)
8064 && ((unsigned HOST_WIDE_INT) const_op
8065 < (((HOST_WIDE_INT) 1
8066 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
8067 {
8068 op0 = XEXP (op0, 0);
8069 continue;
8070 }
8071 break;
8072
8073 case SUBREG:
8074 /* Check for the case where we are comparing A - C1 with C2,
8075 both constants are smaller than 1/2 the maximum positive
8076 value in MODE, and the comparison is equality or unsigned.
8077 In that case, if A is either zero-extended to MODE or has
8078 sufficient sign bits so that the high-order bit in MODE
8079 is a copy of the sign in the inner mode, we can prove that it is
8080 safe to do the operation in the wider mode. This simplifies
8081 many range checks. */
8082
8083 if (mode_width <= HOST_BITS_PER_WIDE_INT
8084 && subreg_lowpart_p (op0)
8085 && GET_CODE (SUBREG_REG (op0)) == PLUS
8086 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
8087 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
8088 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
8089 < GET_MODE_MASK (mode) / 2)
8090 && (unsigned) const_op < GET_MODE_MASK (mode) / 2
8091 && (0 == (significant_bits (XEXP (SUBREG_REG (op0), 0),
8092 GET_MODE (SUBREG_REG (op0)))
8093 & ~ GET_MODE_MASK (mode))
8094 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
8095 GET_MODE (SUBREG_REG (op0)))
8096 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8097 - GET_MODE_BITSIZE (mode)))))
8098 {
8099 op0 = SUBREG_REG (op0);
8100 continue;
8101 }
8102
8103 /* If the inner mode is narrower and we are extracting the low part,
8104 we can treat the SUBREG as if it were a ZERO_EXTEND. */
8105 if (subreg_lowpart_p (op0)
8106 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
8107 /* Fall through */ ;
8108 else
8109 break;
8110
8111 /* ... fall through ... */
8112
8113 case ZERO_EXTEND:
8114 if ((unsigned_comparison_p || equality_comparison_p)
8115 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8116 <= HOST_BITS_PER_WIDE_INT)
8117 && ((unsigned HOST_WIDE_INT) const_op
8118 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
8119 {
8120 op0 = XEXP (op0, 0);
8121 continue;
8122 }
8123 break;
8124
8125 case PLUS:
8126 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
8127 this for equality comparisons due to pathological cases involving
8128 overflows. */
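/* For instance, (eq (plus X (const_int 3)) (const_int 10)) becomes
   (eq X (const_int 7)); subtraction modulo the word size is a
   bijection, so equality survives even if the PLUS wrapped around.  */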
8129 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8130 && (tem = simplify_binary_operation (MINUS, mode, op1,
8131 XEXP (op0, 1))) != 0)
8132 {
8133 op0 = XEXP (op0, 0);
8134 op1 = tem;
8135 continue;
8136 }
8137
8138 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
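/* For nonzero X, the sign bit of (abs X) - 1 is clear: (abs X) is at
   least 1, except for the most negative value, where the decrement
   wraps to the largest positive value. Only X == 0 yields -1, so LT
   becomes EQ and GE becomes NE below.  */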
8139 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
8140 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
8141 {
8142 op0 = XEXP (XEXP (op0, 0), 0);
8143 code = (code == LT ? EQ : NE);
8144 continue;
8145 }
8146 break;
8147
8148 case MINUS:
8149 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
8150 of bits in X minus 1, is one iff X > 0. */
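/* With C == mode_width - 1, (ashiftrt X C) is 0 for X >= 0 and -1
   for X < 0, so the MINUS computes -X or -1-X respectively. The
   former is negative exactly when X > 0 and the latter never is, so
   GE becomes LE and LT becomes GT below.  */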
8151 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8152 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8153 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
8154 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8155 {
8156 op0 = XEXP (op0, 1);
8157 code = (code == GE ? LE : GT);
8158 continue;
8159 }
8160 break;
8161
8162 case XOR:
8163 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
8164 if C is zero or B is a constant. */
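/* For instance, (eq (xor A (const_int 5)) (const_int 3)) becomes
   (eq A (const_int 6)), since XOR with a constant is its own inverse
   and 5 ^ 3 == 6.  */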
8165 if (equality_comparison_p
8166 && 0 != (tem = simplify_binary_operation (XOR, mode,
8167 XEXP (op0, 1), op1)))
8168 {
8169 op0 = XEXP (op0, 0);
8170 op1 = tem;
8171 continue;
8172 }
8173 break;
8174
8175 case EQ: case NE:
8176 case LT: case LTU: case LE: case LEU:
8177 case GT: case GTU: case GE: case GEU:
8178 /* We can't do anything if OP0 is a condition code value, rather
8179 than an actual data value. */
8180 if (const_op != 0
8181 #ifdef HAVE_cc0
8182 || XEXP (op0, 0) == cc0_rtx
8183 #endif
8184 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
8185 break;
8186
8187 /* Get the two operands being compared. */
8188 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
8189 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
8190 else
8191 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
8192
8193 /* Check for the cases where we simply want the result of the
8194 earlier test or the opposite of that result. */
8195 if (code == NE
8196 || (code == EQ && reversible_comparison_p (op0))
8197 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8198 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8199 && (STORE_FLAG_VALUE
8200 & (((HOST_WIDE_INT) 1
8201 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
8202 && (code == LT
8203 || (code == GE && reversible_comparison_p (op0)))))
8204 {
8205 code = (code == LT || code == NE
8206 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
8207 op0 = tem, op1 = tem1;
8208 continue;
8209 }
8210 break;
8211
8212 case IOR:
8213 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
8214 iff X <= 0. */
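/* If X > 0, both X and X-1 are nonnegative and the IOR's sign bit is
   clear. If X == 0, then X-1 is -1; if X < 0, X itself is negative;
   either way the sign bit is set. So GE becomes GT and LT becomes
   LE below.  */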
8215 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
8216 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
8217 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8218 {
8219 op0 = XEXP (op0, 1);
8220 code = (code == GE ? GT : LE);
8221 continue;
8222 }
8223 break;
8224
8225 case AND:
8226 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
8227 will be converted to a ZERO_EXTRACT later. */
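/* Both forms test bit X of Y:
   (eq (and (ashift (const_int 1) X) Y) (const_int 0))
   is equivalent to
   (eq (and (lshiftrt Y X) (const_int 1)) (const_int 0)).  */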
8228 if (const_op == 0 && equality_comparison_p
8229 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
8230 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
8231 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
8232 {
8233 op0 = simplify_and_const_int
8234 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
8235 XEXP (op0, 1),
8236 XEXP (XEXP (op0, 0), 1)),
8237 (HOST_WIDE_INT) 1);
8238 continue;
8239 }
8240
8241 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
8242 zero and X is a comparison and C1 and C2 describe only bits set
8243 in STORE_FLAG_VALUE, we can compare with X. */
8244 if (const_op == 0 && equality_comparison_p
8245 && mode_width <= HOST_BITS_PER_WIDE_INT
8246 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8247 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
8248 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8249 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
8250 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8251 {
8252 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8253 << INTVAL (XEXP (XEXP (op0, 0), 1)));
8254 if ((~ STORE_FLAG_VALUE & mask) == 0
8255 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
8256 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
8257 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
8258 {
8259 op0 = XEXP (XEXP (op0, 0), 0);
8260 continue;
8261 }
8262 }
8263
8264 /* If we are doing an equality comparison of an AND of a bit equal
8265 to the sign bit, replace this with a LT or GE comparison of
8266 the underlying value. */
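/* That is, (eq (and X SIGN_BIT) 0) becomes (ge X 0) and the NE form
   becomes (lt X 0), where SIGN_BIT is the constant with only bit
   mode_width - 1 set (0x80000000 in SImode).  */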
8267 if (equality_comparison_p
8268 && const_op == 0
8269 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8270 && mode_width <= HOST_BITS_PER_WIDE_INT
8271 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8272 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
8273 {
8274 op0 = XEXP (op0, 0);
8275 code = (code == EQ ? GE : LT);
8276 continue;
8277 }
8278
8279 /* If this AND operation is really a ZERO_EXTEND from a narrower
8280 mode, the constant fits within that mode, and this is either an
8281 equality or unsigned comparison, try to do this comparison in
8282 the narrower mode. */
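/* For instance, an SImode (ltu (and X (const_int 255)) (const_int 17))
   can be done as a QImode comparison of the low byte of X against 17,
   since the AND is just that byte zero-extended.  */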
8283 if ((equality_comparison_p || unsigned_comparison_p)
8284 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8285 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
8286 & GET_MODE_MASK (mode))
8287 + 1)) >= 0
8288 && const_op >> i == 0
8289 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
8290 {
8291 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
8292 continue;
8293 }
8294 break;
8295
8296 case ASHIFT:
8297 case LSHIFT:
8298 /* If we have (compare (xshift FOO N) (const_int C)) and
8299 the high order N bits of FOO (N+1 if an inequality comparison)
8300 are not significant, we can do this by comparing FOO with C
8301 shifted right N bits so long as the low-order N bits of C are
8302 zero. */
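/* For instance, (eq (ashift FOO 2) (const_int 20)) becomes
   (eq FOO (const_int 5)) when the two high-order bits of FOO are not
   significant; 20 has its low two bits clear, so shifting it right
   loses nothing.  */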
8303 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8304 && INTVAL (XEXP (op0, 1)) >= 0
8305 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
8306 < HOST_BITS_PER_WIDE_INT)
8307 && ((const_op
8308 & ((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1) == 0)
8309 && mode_width <= HOST_BITS_PER_WIDE_INT
8310 && (significant_bits (XEXP (op0, 0), mode)
8311 & ~ (mask >> (INTVAL (XEXP (op0, 1))
8312 + ! equality_comparison_p))) == 0)
8313 {
8314 const_op >>= INTVAL (XEXP (op0, 1));
8315 op1 = GEN_INT (const_op);
8316 op0 = XEXP (op0, 0);
8317 continue;
8318 }
8319
8320 /* If we are doing a sign bit comparison, it means we are testing
8321 a particular bit. Convert it to the appropriate AND. */
8322 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8323 && mode_width <= HOST_BITS_PER_WIDE_INT)
8324 {
8325 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8326 ((HOST_WIDE_INT) 1
8327 << (mode_width - 1
8328 - INTVAL (XEXP (op0, 1)))));
8329 code = (code == LT ? NE : EQ);
8330 continue;
8331 }
8332
8333 /* If this is an equality comparison with zero and we are shifting
8334 the low bit to the sign bit, we can convert this to an AND of the
8335 low-order bit. */
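/* For instance, in SImode (eq (ashift X (const_int 31)) 0) becomes
   (eq (and X (const_int 1)) 0), since only the low-order bit of X
   survives the shift.  */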
8336 if (const_op == 0 && equality_comparison_p
8337 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8338 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8339 {
8340 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8341 (HOST_WIDE_INT) 1);
8342 continue;
8343 }
8344 break;
8345
8346 case ASHIFTRT:
8347 /* If this is an equality comparison with zero, we can do this
8348 as a logical shift, which might be much simpler. */
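/* Both shifts yield zero exactly when bits C through mode_width - 1
   of the operand are all zero, so the equality test is unaffected
   by which kind of shift we use.  */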
8349 if (equality_comparison_p && const_op == 0
8350 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
8351 {
8352 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
8353 XEXP (op0, 0),
8354 INTVAL (XEXP (op0, 1)));
8355 continue;
8356 }
8357
8358 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
8359 do the comparison in a narrower mode. */
8360 if (! unsigned_comparison_p
8361 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8362 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8363 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
8364 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
8365 MODE_INT, 1)) != BLKmode
8366 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
8367 || ((unsigned HOST_WIDE_INT) - const_op
8368 <= GET_MODE_MASK (tmode))))
8369 {
8370 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
8371 continue;
8372 }
8373
8374 /* ... fall through ... */
8375 case LSHIFTRT:
8376 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
8377 the low order N bits of FOO are not significant, we can do this
8378 by comparing FOO with C shifted left N bits so long as no
8379 overflow occurs. */
8380 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8381 && INTVAL (XEXP (op0, 1)) >= 0
8382 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8383 && mode_width <= HOST_BITS_PER_WIDE_INT
8384 && (significant_bits (XEXP (op0, 0), mode)
8385 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
8386 && (const_op == 0
8387 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
8388 < mode_width)))
8389 {
8390 const_op <<= INTVAL (XEXP (op0, 1));
8391 op1 = GEN_INT (const_op);
8392 op0 = XEXP (op0, 0);
8393 continue;
8394 }
8395
8396 /* If we are using this shift to extract just the sign bit, we
8397 can replace this with an LT or GE comparison. */
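/* For instance, in SImode (ne (lshiftrt X (const_int 31)) 0) becomes
   (lt X 0); the shifted value is 0 or 1 according to the sign bit
   of X.  */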
8398 if (const_op == 0
8399 && (equality_comparison_p || sign_bit_comparison_p)
8400 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8401 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8402 {
8403 op0 = XEXP (op0, 0);
8404 code = (code == NE || code == GT ? LT : GE);
8405 continue;
8406 }
8407 break;
8408 }
8409
8410 break;
8411 }
8412
8413 /* Now make any compound operations involved in this comparison. Then,
8414 check for an outermost SUBREG on OP0 that isn't doing anything or is
8415 paradoxical. The latter case can only occur when it is known that the
8416 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
8417 We can never remove a SUBREG for a non-equality comparison because the
8418 sign bit is in a different place in the underlying object. */
8419
8420 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
8421 op1 = make_compound_operation (op1, SET);
8422
8423 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8424 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8425 && (code == NE || code == EQ)
8426 && ((GET_MODE_SIZE (GET_MODE (op0))
8427 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
8428 {
8429 op0 = SUBREG_REG (op0);
8430 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
8431 }
8432
8433 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8434 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8435 && (code == NE || code == EQ)
8436 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8437 && (significant_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
8438 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
8439 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
8440 op1),
8441 (significant_bits (tem, GET_MODE (SUBREG_REG (op0)))
8442 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
8443 op0 = SUBREG_REG (op0), op1 = tem;
8444
8445 /* We now do the opposite procedure: Some machines don't have compare
8446 insns in all modes. If OP0's mode is an integer mode smaller than a
8447 word and we can't do a compare in that mode, see if there is a larger
8448 mode for which we can do the compare. There are a number of cases in
8449 which we can use the wider mode. */
8450
8451 mode = GET_MODE (op0);
8452 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
8453 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
8454 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
8455 for (tmode = GET_MODE_WIDER_MODE (mode);
8456 (tmode != VOIDmode
8457 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
8458 tmode = GET_MODE_WIDER_MODE (tmode))
8459 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
8460 {
8461 /* If the only significant bits in OP0 and OP1 are those in the
8462 narrower mode and this is an equality or unsigned comparison,
8463 we can use the wider mode. Similarly for sign-extended
8464 values and equality or signed comparisons. */
8465 if (((code == EQ || code == NE
8466 || code == GEU || code == GTU || code == LEU || code == LTU)
8467 && ((significant_bits (op0, tmode) & ~ GET_MODE_MASK (mode))
8468 == 0)
8469 && ((significant_bits (op1, tmode) & ~ GET_MODE_MASK (mode))
8470 == 0))
8471 || ((code == EQ || code == NE
8472 || code == GE || code == GT || code == LE || code == LT)
8473 && (num_sign_bit_copies (op0, tmode)
8474 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
8475 && (num_sign_bit_copies (op1, tmode)
8476 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
8477 {
8478 op0 = gen_lowpart_for_combine (tmode, op0);
8479 op1 = gen_lowpart_for_combine (tmode, op1);
8480 break;
8481 }
8482
8483 /* If this is a test for negative, we can make an explicit
8484 test of the sign bit. */
8485
8486 if (op1 == const0_rtx && (code == LT || code == GE)
8487 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8488 {
8489 op0 = gen_binary (AND, tmode,
8490 gen_lowpart_for_combine (tmode, op0),
8491 GEN_INT ((HOST_WIDE_INT) 1
8492 << (GET_MODE_BITSIZE (mode) - 1)));
8493 code = (code == LT) ? NE : EQ;
8494 break;
8495 }
8496 }
8497
8498 *pop0 = op0;
8499 *pop1 = op1;
8500
8501 return code;
8502 }
8503 \f
8504 /* Return 1 if we know that X, a comparison operation, is not operating
8505 on a floating-point value or is EQ or NE, meaning that we can safely
8506 reverse it. */
8507
8508 static int
8509 reversible_comparison_p (x)
8510 rtx x;
8511 {
8512 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
8513 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
8514 return 1;
8515
8516 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
8517 {
8518 case MODE_INT:
8519 return 1;
8520
8521 case MODE_CC:
8522 x = get_last_value (XEXP (x, 0));
8523 return (x && GET_CODE (x) == COMPARE
8524 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
8525 }
8526
8527 return 0;
8528 }
8529 \f
8530 /* Utility function for the following routine. Called when X is part of a value
8531 being stored into reg_last_set_value. Sets reg_last_set_table_tick
8532 for each register mentioned. Similar to mention_regs in cse.c */
8533
8534 static void
8535 update_table_tick (x)
8536 rtx x;
8537 {
8538 register enum rtx_code code = GET_CODE (x);
8539 register char *fmt = GET_RTX_FORMAT (code);
8540 register int i;
8541
8542 if (code == REG)
8543 {
8544 int regno = REGNO (x);
8545 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8546 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8547
8548 for (i = regno; i < endregno; i++)
8549 reg_last_set_table_tick[i] = label_tick;
8550
8551 return;
8552 }
8553
8554 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8555 /* Note that we can't have an "E" in values stored; see
8556 get_last_value_validate. */
8557 if (fmt[i] == 'e')
8558 update_table_tick (XEXP (x, i));
8559 }
8560
8561 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
8562 are saying that the register is clobbered and we no longer know its
8563 value. If INSN is zero, don't update reg_last_set; this call is normally
8564 done with VALUE also zero to invalidate the register. */
8565
8566 static void
8567 record_value_for_reg (reg, insn, value)
8568 rtx reg;
8569 rtx insn;
8570 rtx value;
8571 {
8572 int regno = REGNO (reg);
8573 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8574 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
8575 int i;
8576
8577 /* If VALUE contains REG and we have a previous value for REG, substitute
8578 the previous value. */
8579 if (value && insn && reg_overlap_mentioned_p (reg, value))
8580 {
8581 rtx tem;
8582
8583 /* Set things up so get_last_value is allowed to see anything set up to
8584 our insn. */
8585 subst_low_cuid = INSN_CUID (insn);
8586 tem = get_last_value (reg);
8587
8588 if (tem)
8589 value = replace_rtx (copy_rtx (value), reg, tem);
8590 }
8591
8592 /* For each register modified, show we don't know its value, that
8593 its value has been updated, and that we don't know the location of
8594 the death of the register. */
8595 for (i = regno; i < endregno; i ++)
8596 {
8597 if (insn)
8598 reg_last_set[i] = insn;
8599 reg_last_set_value[i] = 0;
8600 reg_last_death[i] = 0;
8601 }
8602
8603 /* Mark registers that are being referenced in this value. */
8604 if (value)
8605 update_table_tick (value);
8606
8607 /* Now update the status of each register being set.
8608 If this register is already used in this basic block, mark it as
8609 invalid, since we would otherwise confuse its two live ranges in
8610 the block; any use of the register then remains invalid. In cse, we
8611 scan the table to invalidate all entries using the register, but
8612 that is too much work for us here. */
8613
8614 for (i = regno; i < endregno; i++)
8615 {
8616 reg_last_set_label[i] = label_tick;
8617 if (value && reg_last_set_table_tick[i] == label_tick)
8618 reg_last_set_invalid[i] = 1;
8619 else
8620 reg_last_set_invalid[i] = 0;
8621 }
8622
8623 /* The value being assigned might refer to REG (as in "x++;"). In that
8624 case, we must replace it with (clobber (const_int 0)) to prevent
8625 infinite loops. */
8626 if (value && ! get_last_value_validate (&value,
8627 reg_last_set_label[regno], 0))
8628 {
8629 value = copy_rtx (value);
8630 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
8631 value = 0;
8632 }
8633
8634 /* For the main register being modified, update the value. */
8635 reg_last_set_value[regno] = value;
8636
8637 }
8638
8639 /* Used for communication between the following two routines. */
8640 static rtx record_dead_insn;
8641
8642 /* Called via note_stores from record_dead_and_set_regs to handle one
8643 SET or CLOBBER in an insn. */
8644
8645 static void
8646 record_dead_and_set_regs_1 (dest, setter)
8647 rtx dest, setter;
8648 {
8649 if (GET_CODE (dest) == REG)
8650 {
8651 /* If we are setting the whole register, we know its value. Otherwise
8652 show that we don't know the value. We can handle SUBREG in
8653 some cases. */
8654 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
8655 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
8656 else if (GET_CODE (setter) == SET
8657 && GET_CODE (SET_DEST (setter)) == SUBREG
8658 && SUBREG_REG (SET_DEST (setter)) == dest
8659 && subreg_lowpart_p (SET_DEST (setter)))
8660 record_value_for_reg (dest, record_dead_insn,
8661 gen_lowpart_for_combine (GET_MODE (dest),
8662 SET_SRC (setter)));
8663 else
8664 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
8665 }
8666 else if (GET_CODE (dest) == MEM
8667 /* Ignore pushes, they clobber nothing. */
8668 && ! push_operand (dest, GET_MODE (dest)))
8669 mem_last_set = INSN_CUID (record_dead_insn);
8670 }
8671
8672 /* Update the records of when each REG was most recently set or killed
8673 for the things done by INSN. This is the last thing done in processing
8674 INSN in the combiner loop.
8675
8676 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
8677 similar information mem_last_set (which insn most recently modified memory)
8678 and last_call_cuid (which insn was the most recent subroutine call). */
8679
8680 static void
8681 record_dead_and_set_regs (insn)
8682 rtx insn;
8683 {
8684 register rtx link;
8685 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
8686 {
8687 if (REG_NOTE_KIND (link) == REG_DEAD)
8688 reg_last_death[REGNO (XEXP (link, 0))] = insn;
8689 else if (REG_NOTE_KIND (link) == REG_INC)
8690 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
8691 }
8692
8693 if (GET_CODE (insn) == CALL_INSN)
8694 last_call_cuid = mem_last_set = INSN_CUID (insn);
8695
8696 record_dead_insn = insn;
8697 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
8698 }
8699 \f
8700 /* Utility routine for the following function. Verify that all the registers
8701 mentioned in *LOC are valid when *LOC was part of a value set when
8702 label_tick == TICK. Return 0 if some are not.
8703
8704 If REPLACE is non-zero, replace the invalid reference with
8705 (clobber (const_int 0)) and return 1. This replacement is useful because
8706 we often can get useful information about the form of a value (e.g., if
8707 it was produced by a shift that always produces -1 or 0) even though
8708 we don't know exactly what registers it was produced from. */
8709
8710 static int
8711 get_last_value_validate (loc, tick, replace)
8712 rtx *loc;
8713 int tick;
8714 int replace;
8715 {
8716 rtx x = *loc;
8717 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
8718 int len = GET_RTX_LENGTH (GET_CODE (x));
8719 int i;
8720
8721 if (GET_CODE (x) == REG)
8722 {
8723 int regno = REGNO (x);
8724 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8725 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8726 int j;
8727
8728 for (j = regno; j < endregno; j++)
8729 if (reg_last_set_invalid[j]
8730 /* If this is a pseudo-register that was only set once, it is
8731 always valid. */
8732 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
8733 && reg_last_set_label[j] > tick))
8734 {
8735 if (replace)
8736 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8737 return replace;
8738 }
8739
8740 return 1;
8741 }
8742
8743 for (i = 0; i < len; i++)
8744 if ((fmt[i] == 'e'
8745 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
8746 /* Don't bother with these. They shouldn't occur anyway. */
8747 || fmt[i] == 'E')
8748 return 0;
8749
8750 /* If we haven't found a reason for it to be invalid, it is valid. */
8751 return 1;
8752 }
8753
8754 /* Get the last value assigned to X, if known. Some registers
8755 in the value may be replaced with (clobber (const_int 0)) if their value
8756 is no longer known reliably. */
8757
8758 static rtx
8759 get_last_value (x)
8760 rtx x;
8761 {
8762 int regno;
8763 rtx value;
8764
8765 /* If this is a non-paradoxical SUBREG, get the value of its operand and
8766 then convert it to the desired mode. If this is a paradoxical SUBREG,
8767 we cannot predict what values the "extra" bits might have. */
8768 if (GET_CODE (x) == SUBREG
8769 && subreg_lowpart_p (x)
8770 && (GET_MODE_SIZE (GET_MODE (x))
8771 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8772 && (value = get_last_value (SUBREG_REG (x))) != 0)
8773 return gen_lowpart_for_combine (GET_MODE (x), value);
8774
8775 if (GET_CODE (x) != REG)
8776 return 0;
8777
8778 regno = REGNO (x);
8779 value = reg_last_set_value[regno];
8780
8781 /* If we don't have a value or if it isn't for this basic block, return 0. */
8782
8783 if (value == 0
8784 || (reg_n_sets[regno] != 1
8785 && (reg_last_set_label[regno] != label_tick)))
8786 return 0;
8787
8788 /* If the value was set in a later insn than the ones we are processing,
8789 we can't use it, but make a quick check to see if the previous insn
8790 set it to something. This is commonly the case when the same pseudo
8791 is used by repeated insns. */
8792
8793 if (reg_n_sets[regno] != 1
8794 && INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
8795 {
8796 rtx insn, set;
8797
8798 for (insn = prev_nonnote_insn (subst_insn);
8799 insn && INSN_CUID (insn) >= subst_low_cuid;
8800 insn = prev_nonnote_insn (insn))
8801 ;
8802
8803 if (insn
8804 && (set = single_set (insn)) != 0
8805 && rtx_equal_p (SET_DEST (set), x))
8806 {
8807 value = SET_SRC (set);
8808
8809 /* Make sure that VALUE doesn't reference X. Replace any
8810 explicit references with a CLOBBER. If there are any remaining
8811 references (rare), don't use the value. */
8812
8813 if (reg_mentioned_p (x, value))
8814 value = replace_rtx (copy_rtx (value), x,
8815 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
8816
8817 if (reg_overlap_mentioned_p (x, value))
8818 return 0;
8819 }
8820 else
8821 return 0;
8822 }
8823
8824 /* If the value has all its registers valid, return it. */
8825 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
8826 return value;
8827
8828 /* Otherwise, make a copy and replace any invalid register with
8829 (clobber (const_int 0)). If that fails for some reason, return 0. */
8830
8831 value = copy_rtx (value);
8832 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
8833 return value;
8834
8835 return 0;
8836 }
8837 \f
8838 /* Return nonzero if expression X refers to a REG or to memory
8839 that is set in an instruction more recent than FROM_CUID. */
8840
8841 static int
8842 use_crosses_set_p (x, from_cuid)
8843 register rtx x;
8844 int from_cuid;
8845 {
8846 register char *fmt;
8847 register int i;
8848 register enum rtx_code code = GET_CODE (x);
8849
8850 if (code == REG)
8851 {
8852 register int regno = REGNO (x);
8853 #ifdef PUSH_ROUNDING
8854 /* Don't allow uses of the stack pointer to be moved,
8855 because we don't know whether the move crosses a push insn. */
8856 if (regno == STACK_POINTER_REGNUM)
8857 return 1;
8858 #endif
8859 return (reg_last_set[regno]
8860 && INSN_CUID (reg_last_set[regno]) > from_cuid);
8861 }
8862
8863 if (code == MEM && mem_last_set > from_cuid)
8864 return 1;
8865
8866 fmt = GET_RTX_FORMAT (code);
8867
8868 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8869 {
8870 if (fmt[i] == 'E')
8871 {
8872 register int j;
8873 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8874 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
8875 return 1;
8876 }
8877 else if (fmt[i] == 'e'
8878 && use_crosses_set_p (XEXP (x, i), from_cuid))
8879 return 1;
8880 }
8881 return 0;
8882 }
8883 \f
8884 /* Define three variables used for communication between the following
8885 routines. */
8886
8887 static int reg_dead_regno, reg_dead_endregno;
8888 static int reg_dead_flag;
8889
8890 /* Function called via note_stores from reg_dead_at_p.
8891
8892 If DEST is within [reg_dead_regno, reg_dead_endregno), set
8893 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
8894
8895 static void
8896 reg_dead_at_p_1 (dest, x)
8897 rtx dest;
8898 rtx x;
8899 {
8900 int regno, endregno;
8901
8902 if (GET_CODE (dest) != REG)
8903 return;
8904
8905 regno = REGNO (dest);
8906 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8907 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
8908
8909 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
8910 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
8911 }
8912
8913 /* Return non-zero if REG is known to be dead at INSN.
8914
8915 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
8916 referencing REG, it is dead. If we hit a SET referencing REG, it is
8917 live. Otherwise, see if it is live or dead at the start of the basic
8918 block we are in. */
8919
8920 static int
8921 reg_dead_at_p (reg, insn)
8922 rtx reg;
8923 rtx insn;
8924 {
8925 int block, i;
8926
8927 /* Set variables for reg_dead_at_p_1. */
8928 reg_dead_regno = REGNO (reg);
8929 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
8930 ? HARD_REGNO_NREGS (reg_dead_regno,
8931 GET_MODE (reg))
8932 : 1);
8933
8934 reg_dead_flag = 0;
8935
8936 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
8937 beginning of function. */
8938 for (; insn && GET_CODE (insn) != CODE_LABEL;
8939 insn = prev_nonnote_insn (insn))
8940 {
8941 note_stores (PATTERN (insn), reg_dead_at_p_1);
8942 if (reg_dead_flag)
8943 return reg_dead_flag == 1 ? 1 : 0;
8944
8945 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
8946 return 1;
8947 }
8948
8949 /* Get the basic block number that we were in. */
8950 if (insn == 0)
8951 block = 0;
8952 else
8953 {
8954 for (block = 0; block < n_basic_blocks; block++)
8955 if (insn == basic_block_head[block])
8956 break;
8957
8958 if (block == n_basic_blocks)
8959 return 0;
8960 }
8961
8962 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
8963 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
8964 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
8965 return 0;
8966
8967 return 1;
8968 }
8969 \f
8970 /* Remove register number REGNO from the dead registers list of INSN.
8971
8972 Return the note used to record the death, if there was one. */
8973
8974 rtx
8975 remove_death (regno, insn)
8976 int regno;
8977 rtx insn;
8978 {
8979 register rtx note = find_regno_note (insn, REG_DEAD, regno);
8980
8981 if (note)
8982 {
8983 reg_n_deaths[regno]--;
8984 remove_note (insn, note);
8985 }
8986
8987 return note;
8988 }
8989
8990 /* For each register (hardware or pseudo) used within expression X, if its
8991 death is in an instruction with cuid between FROM_CUID (inclusive) and
8992 TO_INSN (exclusive), put a REG_DEAD note for that register in the
8993 list headed by PNOTES.
8994
8995 This is done when X is being merged by combination into TO_INSN. These
8996 notes will then be distributed as needed. */
8997
8998 static void
8999 move_deaths (x, from_cuid, to_insn, pnotes)
9000 rtx x;
9001 int from_cuid;
9002 rtx to_insn;
9003 rtx *pnotes;
9004 {
9005 register char *fmt;
9006 register int len, i;
9007 register enum rtx_code code = GET_CODE (x);
9008
9009 if (code == REG)
9010 {
9011 register int regno = REGNO (x);
9012 register rtx where_dead = reg_last_death[regno];
9013
9014 if (where_dead && INSN_CUID (where_dead) >= from_cuid
9015 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
9016 {
9017 rtx note = remove_death (regno, reg_last_death[regno]);
9018
9019 /* It is possible for the call above to return 0. This can occur
9020 when reg_last_death points to I2 or I1 that we combined with.
9021 In that case make a new note. */
9022
9023 if (note)
9024 {
9025 XEXP (note, 1) = *pnotes;
9026 *pnotes = note;
9027 }
9028 else
9029 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
9030
9031 reg_n_deaths[regno]++;
9032 }
9033
9034 return;
9035 }
9036
9037 else if (GET_CODE (x) == SET)
9038 {
9039 rtx dest = SET_DEST (x);
9040
9041 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
9042
9043 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
9044 that accesses one word of a multi-word item, some
9045 piece of every register in the expression is used by
9046 this insn, so remove any old death. */
9047
9048 if (GET_CODE (dest) == ZERO_EXTRACT
9049 || GET_CODE (dest) == STRICT_LOW_PART
9050 || (GET_CODE (dest) == SUBREG
9051 && (((GET_MODE_SIZE (GET_MODE (dest))
9052 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
9053 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
9054 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
9055 {
9056 move_deaths (dest, from_cuid, to_insn, pnotes);
9057 return;
9058 }
9059
9060 /* If this is some other SUBREG, we know it replaces the entire
9061 value, so use that as the destination. */
9062 if (GET_CODE (dest) == SUBREG)
9063 dest = SUBREG_REG (dest);
9064
9065 /* If this is a MEM, adjust deaths of anything used in the address.
9066 For a REG (the only other possibility), the entire value is
9067 being replaced so the old value is not used in this insn. */
9068
9069 if (GET_CODE (dest) == MEM)
9070 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
9071 return;
9072 }
9073
9074 else if (GET_CODE (x) == CLOBBER)
9075 return;
9076
9077 len = GET_RTX_LENGTH (code);
9078 fmt = GET_RTX_FORMAT (code);
9079
9080 for (i = 0; i < len; i++)
9081 {
9082 if (fmt[i] == 'E')
9083 {
9084 register int j;
9085 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9086 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
9087 }
9088 else if (fmt[i] == 'e')
9089 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
9090 }
9091 }
9092 \f
9093 /* Return 1 if X is the target of a bit-field assignment in BODY, the
9094 pattern of an insn. X must be a REG. */
9095
9096 static int
9097 reg_bitfield_target_p (x, body)
9098 rtx x;
9099 rtx body;
9100 {
9101 int i;
9102
9103 if (GET_CODE (body) == SET)
9104 {
9105 rtx dest = SET_DEST (body);
9106 rtx target;
9107 int regno, tregno, endregno, endtregno;
9108
9109 if (GET_CODE (dest) == ZERO_EXTRACT)
9110 target = XEXP (dest, 0);
9111 else if (GET_CODE (dest) == STRICT_LOW_PART)
9112 target = SUBREG_REG (XEXP (dest, 0));
9113 else
9114 return 0;
9115
9116 if (GET_CODE (target) == SUBREG)
9117 target = SUBREG_REG (target);
9118
9119 if (GET_CODE (target) != REG)
9120 return 0;
9121
9122 tregno = REGNO (target), regno = REGNO (x);
9123 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
9124 return target == x;
9125
9126 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
9127 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9128
9129 return endregno > tregno && regno < endtregno;
9130 }
9131
9132 else if (GET_CODE (body) == PARALLEL)
9133 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
9134 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
9135 return 1;
9136
9137 return 0;
9138 }
9139 \f
9140 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
9141 as appropriate. I3 and I2 are the insns resulting from the combination
9142 insns including FROM (I2 may be zero).
9143
9144 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
9145 not need REG_DEAD notes because they are being substituted for. This
9146 saves searching in the most common cases.
9147
9148 Each note in the list is either ignored or placed on some insns, depending
9149 on the type of note. */
9150
9151 static void
9152 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
9153 rtx notes;
9154 rtx from_insn;
9155 rtx i3, i2;
9156 rtx elim_i2, elim_i1;
9157 {
9158 rtx note, next_note;
9159 rtx tem;
9160
9161 for (note = notes; note; note = next_note)
9162 {
9163 rtx place = 0, place2 = 0;
9164
9165 /* If this NOTE references a pseudo register, ensure it references
9166 the latest copy of that register. */
9167 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
9168 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
9169 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
9170
9171 next_note = XEXP (note, 1);
9172 switch (REG_NOTE_KIND (note))
9173 {
9174 case REG_UNUSED:
9175 /* If this register is set or clobbered in I3, put the note there
9176 unless there is one already. */
9177 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
9178 {
9179 if (! (GET_CODE (XEXP (note, 0)) == REG
9180 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
9181 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
9182 place = i3;
9183 }
9184 /* Otherwise, if this register is used by I3, then this register
9185 now dies here, so we must put a REG_DEAD note here unless there
9186 is one already. */
9187 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
9188 && ! (GET_CODE (XEXP (note, 0)) == REG
9189 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
9190 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
9191 {
9192 PUT_REG_NOTE_KIND (note, REG_DEAD);
9193 place = i3;
9194 }
9195 break;
9196
9197 case REG_EQUAL:
9198 case REG_EQUIV:
9199 case REG_NONNEG:
9200 /* These notes say something about results of an insn. We can
9201 only support them if they used to be on I3 in which case they
9202 remain on I3. Otherwise they are ignored.
9203
9204 If the note refers to an expression that is not a constant, we
9205 must also ignore the note since we cannot tell whether the
9206 equivalence is still true. It might be possible to do
9207 slightly better than this (we only have a problem if I2DEST
9208 or I1DEST is present in the expression), but it doesn't
9209 seem worth the trouble. */
9210
9211 if (from_insn == i3
9212 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
9213 place = i3;
9214 break;
9215
9216 case REG_INC:
9217 case REG_NO_CONFLICT:
9218 case REG_LABEL:
9219 /* These notes say something about how a register is used. They must
9220 be present on any use of the register in I2 or I3. */
9221 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
9222 place = i3;
9223
9224 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
9225 {
9226 if (place)
9227 place2 = i2;
9228 else
9229 place = i2;
9230 }
9231 break;
9232
9233 case REG_WAS_0:
9234 /* It is too much trouble to try to see if this note is still
9235 correct in all situations. It is better to simply delete it. */
9236 break;
9237
9238 case REG_RETVAL:
9239 /* If the insn previously containing this note still exists,
9240 put it back where it was. Otherwise move it to the previous
9241 insn. Adjust the corresponding REG_LIBCALL note. */
9242 if (GET_CODE (from_insn) != NOTE)
9243 place = from_insn;
9244 else
9245 {
9246 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
9247 place = prev_real_insn (from_insn);
9248 if (tem && place)
9249 XEXP (tem, 0) = place;
9250 }
9251 break;
9252
9253 case REG_LIBCALL:
9254 /* This is handled similarly to REG_RETVAL. */
9255 if (GET_CODE (from_insn) != NOTE)
9256 place = from_insn;
9257 else
9258 {
9259 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
9260 place = next_real_insn (from_insn);
9261 if (tem && place)
9262 XEXP (tem, 0) = place;
9263 }
9264 break;
9265
9266 case REG_DEAD:
9267 /* If the register is used as an input in I3, it dies there.
9268 Similarly for I2, if it is non-zero and adjacent to I3.
9269
9270 If the register is not used as an input in either I3 or I2
9271 and it is not one of the registers we were supposed to eliminate,
9272 there are two possibilities. We might have a non-adjacent I2
9273 or we might have somehow eliminated an additional register
9274 from a computation. For example, we might have had A & B where
9275 we discover that B will always be zero. In this case we will
9276 eliminate the reference to A.
9277
9278 In both cases, we must search to see if we can find a previous
9279 use of A and put the death note there. */
9280
9281 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
9282 place = i3;
9283 else if (i2 != 0 && next_nonnote_insn (i2) == i3
9284 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9285 place = i2;
9286
9287 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
9288 break;
9289
9290 /* If the register is used in both I2 and I3 and it dies in I3,
9291 we might have added another reference to it. If reg_n_refs
9292 was 2, bump it to 3. This has to be correct since the
9293 register must have been set somewhere. The reason this is
9294 done is because local-alloc.c treats 2 references as a
9295 special case. */
9296
9297 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
9298 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
9299 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9300 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
9301
9302 if (place == 0)
9303 for (tem = prev_nonnote_insn (i3);
9304 tem && (GET_CODE (tem) == INSN
9305 || GET_CODE (tem) == CALL_INSN);
9306 tem = prev_nonnote_insn (tem))
9307 {
9308 /* If the register is being set at TEM, see if that is all
9309 TEM is doing. If so, delete TEM. Otherwise, make this
9310 into a REG_UNUSED note instead. */
9311 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
9312 {
9313 rtx set = single_set (tem);
9314
9315 /* Verify that it was the set, and not a clobber that
9316 modified the register. */
9317
9318 if (set != 0 && ! side_effects_p (SET_SRC (set))
9319 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
9320 {
9321 /* Move the notes and links of TEM elsewhere.
9322 This might delete other dead insns recursively.
9323 First set the pattern to something that won't use
9324 any register. */
9325
9326 PATTERN (tem) = pc_rtx;
9327
9328 distribute_notes (REG_NOTES (tem), tem, tem,
9329 NULL_RTX, NULL_RTX, NULL_RTX);
9330 distribute_links (LOG_LINKS (tem));
9331
9332 PUT_CODE (tem, NOTE);
9333 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
9334 NOTE_SOURCE_FILE (tem) = 0;
9335 }
9336 else
9337 {
9338 PUT_REG_NOTE_KIND (note, REG_UNUSED);
9339
9340 /* If there isn't already a REG_UNUSED note, put one
9341 here. */
9342 if (! find_regno_note (tem, REG_UNUSED,
9343 REGNO (XEXP (note, 0))))
9344 place = tem;
9345 break;
9346 }
9347 }
9348 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
9349 {
9350 place = tem;
9351 break;
9352 }
9353 }
9354
9355 /* If the register is set or already dead at PLACE, we needn't do
9356 anything with this note if it is still a REG_DEAD note.
9357
9358 Note that we cannot use just `dead_or_set_p' here since we can
9359 convert an assignment to a register into a bit-field assignment.
9360 Therefore, we must also omit the note if the register is the
9361 target of a bitfield assignment. */
9362
9363 if (place && REG_NOTE_KIND (note) == REG_DEAD)
9364 {
9365 int regno = REGNO (XEXP (note, 0));
9366
9367 if (dead_or_set_p (place, XEXP (note, 0))
9368 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
9369 {
9370 /* Unless the register previously died in PLACE, clear
9371 reg_last_death. [I no longer understand why this is
9372 being done.] */
9373 if (reg_last_death[regno] != place)
9374 reg_last_death[regno] = 0;
9375 place = 0;
9376 }
9377 else
9378 reg_last_death[regno] = place;
9379
9380 /* If this is a death note for a hard reg that is occupying
9381 multiple registers, ensure that we are still using all
9382 parts of the object. If we find a piece of the object
9383 that is unused, we must add a USE for that piece before
9384 PLACE and put the appropriate REG_DEAD note on it.
9385
9386 An alternative would be to put a REG_UNUSED for the pieces
9387 on the insn that set the register, but that can't be done if
9388 it is not in the same block. It is simpler, though less
9389 efficient, to add the USE insns. */
9390
9391 if (place && regno < FIRST_PSEUDO_REGISTER
9392 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
9393 {
9394 int endregno
9395 = regno + HARD_REGNO_NREGS (regno,
9396 GET_MODE (XEXP (note, 0)));
9397 int all_used = 1;
9398 int i;
9399
9400 for (i = regno; i < endregno; i++)
9401 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
9402 {
9403 rtx piece = gen_rtx (REG, word_mode, i);
9404 rtx p;
9405
9406 /* See if we already placed a USE note for this
9407 register in front of PLACE. */
9408 for (p = place;
9409 GET_CODE (PREV_INSN (p)) == INSN
9410 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
9411 p = PREV_INSN (p))
9412 if (rtx_equal_p (piece,
9413 XEXP (PATTERN (PREV_INSN (p)), 0)))
9414 {
9415 p = 0;
9416 break;
9417 }
9418
9419 if (p)
9420 {
9421 rtx use_insn
9422 = emit_insn_before (gen_rtx (USE, VOIDmode,
9423 piece),
9424 p);
9425 REG_NOTES (use_insn)
9426 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
9427 REG_NOTES (use_insn));
9428 }
9429
9430 all_used = 0;
9431 }
9432
9433 if (! all_used)
9434 {
9435 /* Put only REG_DEAD notes for pieces that are
9436 still used and that are not already dead or set. */
9437
9438 for (i = regno; i < endregno; i++)
9439 {
9440 rtx piece = gen_rtx (REG, word_mode, i);
9441
9442 if (reg_referenced_p (piece, PATTERN (place))
9443 && ! dead_or_set_p (place, piece)
9444 && ! reg_bitfield_target_p (piece,
9445 PATTERN (place)))
9446 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
9447 piece,
9448 REG_NOTES (place));
9449 }
9450
9451 place = 0;
9452 }
9453 }
9454 }
9455 break;
9456
9457 default:
9458 /* Any other notes should not be present at this point in the
9459 compilation. */
9460 abort ();
9461 }
9462
9463 if (place)
9464 {
9465 XEXP (note, 1) = REG_NOTES (place);
9466 REG_NOTES (place) = note;
9467 }
9468 else if ((REG_NOTE_KIND (note) == REG_DEAD
9469 || REG_NOTE_KIND (note) == REG_UNUSED)
9470 && GET_CODE (XEXP (note, 0)) == REG)
9471 reg_n_deaths[REGNO (XEXP (note, 0))]--;
9472
9473 if (place2)
9474 {
9475 if ((REG_NOTE_KIND (note) == REG_DEAD
9476 || REG_NOTE_KIND (note) == REG_UNUSED)
9477 && GET_CODE (XEXP (note, 0)) == REG)
9478 reg_n_deaths[REGNO (XEXP (note, 0))]++;
9479
9480 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
9481 XEXP (note, 0), REG_NOTES (place2));
9482 }
9483 }
9484 }
9485 \f
9486 /* Similarly to above, distribute the LOG_LINKS that used to be present on
9487 I3, I2, and I1 to new locations. This is also called in one case to
9488 add a link pointing at I3 when I3's destination is changed. */
9489
9490 static void
9491 distribute_links (links)
9492 rtx links;
9493 {
9494 rtx link, next_link;
9495
9496 for (link = links; link; link = next_link)
9497 {
9498 rtx place = 0;
9499 rtx insn;
9500 rtx set, reg;
9501
9502 next_link = XEXP (link, 1);
9503
9504 /* If the insn that this link points to is a NOTE or isn't a single
9505 set, ignore it. In the latter case, it isn't clear what we
9506 can do other than ignore the link, since we can't tell which
9507 register it was for. Such links wouldn't be used by combine
9508 anyway.
9509
9510 It is not possible for the destination of the target of the link to
9511 have been changed by combine. The only way this could happen is if we
9512 replace I3, I2, and I1 by I3 and I2. But in that case the
9513 destination of I2 also remains unchanged. */
9514
9515 if (GET_CODE (XEXP (link, 0)) == NOTE
9516 || (set = single_set (XEXP (link, 0))) == 0)
9517 continue;
9518
9519 reg = SET_DEST (set);
9520 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
9521 || GET_CODE (reg) == SIGN_EXTRACT
9522 || GET_CODE (reg) == STRICT_LOW_PART)
9523 reg = XEXP (reg, 0);
9524
9525 /* A LOG_LINK is defined as being placed on the first insn that uses
9526 a register and points to the insn that sets the register. Start
9527 searching at the next insn after the target of the link and stop
9528 when we reach a set of the register or the end of the basic block.
9529
9530 Note that this correctly handles the link that used to point from
9531 I3 to I2. Also note that not much searching is typically done here
9532 since most links don't point very far away. */
9533
9534 for (insn = NEXT_INSN (XEXP (link, 0));
9535 (insn && GET_CODE (insn) != CODE_LABEL
9536 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
9537 insn = NEXT_INSN (insn))
9538 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
9539 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
9540 {
9541 if (reg_referenced_p (reg, PATTERN (insn)))
9542 place = insn;
9543 break;
9544 }
9545
9546 /* If we found a place to put the link, place it there unless there
9547 is already a link to the same insn as LINK at that point. */
9548
9549 if (place)
9550 {
9551 rtx link2;
9552
9553 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
9554 if (XEXP (link2, 0) == XEXP (link, 0))
9555 break;
9556
9557 if (link2 == 0)
9558 {
9559 XEXP (link, 1) = LOG_LINKS (place);
9560 LOG_LINKS (place) = link;
9561 }
9562 }
9563 }
9564 }
9565 \f
9566 void
9567 dump_combine_stats (file)
9568 FILE *file;
9569 {
9570 fprintf
9571 (file,
9572 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
9573 combine_attempts, combine_merges, combine_extras, combine_successes);
9574 }
9575
9576 void
9577 dump_combine_total_stats (file)
9578 FILE *file;
9579 {
9580 fprintf
9581 (file,
9582 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
9583 total_attempts, total_merges, total_extras, total_successes);
9584 }