1 /* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 /* This module is essentially the "combiner" phase of the U. of Arizona
22 Portable Optimizer, but redone to work on our list-structured
23 representation for RTL instead of their string representation.
24
25 The LOG_LINKS of each insn identify the most recent assignment
26 to each REG used in the insn. It is a list of previous insns,
27 each of which contains a SET for a REG that is used in this insn
28 and not used or set in between. LOG_LINKs never cross basic blocks.
29 They were set up by the preceding pass (lifetime analysis).
30
31 We try to combine each pair of insns joined by a logical link.
32 We also try to combine triples of insns A, B and C when
33 C has a link back to B and B has a link back to A.
34
35 LOG_LINKS does not have links for uses of CC0. None are
36 needed, because the insn that sets CC0 is always immediately
37 before the insn that tests it. So we always regard a branch
38 insn as having a logical link to the preceding insn. The same is true
39 for an insn explicitly using CC0.
40
41 We check (with use_crosses_set_p) to avoid combining in such a way
42 as to move a computation to a place where its value would be different.
43
44 Combination is done by mathematically substituting the previous
45 insn(s) values for the regs they set into the expressions in
46 the later insns that refer to these regs. If the result is a valid insn
47 for our target machine, according to the machine description,
48 we install it, delete the earlier insns, and update the data flow
49 information (LOG_LINKS and REG_NOTES) for what we did.
50
51 There are a few exceptions where the dataflow information created by
52 flow.c isn't completely updated:
53
54 - reg_live_length is not updated
55 - reg_n_refs is not adjusted in the rare case when a register is
56 no longer required in a computation
57 - there are extremely rare cases (see distribute_regnotes) when a
58 REG_DEAD note is lost
59 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
60 removed because there is no way to know which register it was
61 linking
62
63 To simplify substitution, we combine only when the earlier insn(s)
64 consist of only a single assignment. To simplify updating afterward,
65 we never combine when a subroutine call appears in the middle.
66
67 Since we do not represent assignments to CC0 explicitly except when that
68 is all an insn does, there is no LOG_LINKS entry in an insn that uses
69 the condition code for the insn that set the condition code.
70 Fortunately, these two insns must be consecutive.
71 Therefore, every JUMP_INSN is taken to have an implicit logical link
72 to the preceding insn. This is not quite right, since non-jumps can
73 also use the condition code; but in practice such insns would not
74 combine anyway. */
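/* For example, given two linked insns (a hypothetical sketch; the
   register numbers and modes are illustrative only):

       (set (reg:SI 80) (plus:SI (reg:SI 79) (const_int 4)))
       (set (reg:SI 81) (mult:SI (reg:SI 80) (reg:SI 82)))

   if (reg:SI 80) dies in the second insn, substituting the first
   insn's source into the second gives

       (set (reg:SI 81) (mult:SI (plus:SI (reg:SI 79) (const_int 4))
                                 (reg:SI 82)))

   which is installed, and the first insn deleted, if the machine
   description recognizes the result.  */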
75
76 #include "config.h"
77 #include "gvarargs.h"
78 #include "rtl.h"
79 #include "flags.h"
80 #include "regs.h"
81 #include "expr.h"
82 #include "basic-block.h"
83 #include "insn-config.h"
84 #include "insn-flags.h"
85 #include "insn-codes.h"
86 #include "insn-attr.h"
87 #include "recog.h"
88 #include "real.h"
89 #include <stdio.h>
90
91 /* It is not safe to use ordinary gen_lowpart in combine.
92 Use gen_lowpart_for_combine instead. See comments there. */
93 #define gen_lowpart dont_use_gen_lowpart_you_dummy
94
95 /* Number of attempts to combine instructions in this function. */
96
97 static int combine_attempts;
98
99 /* Number of attempts that got as far as substitution in this function. */
100
101 static int combine_merges;
102
103 /* Number of instructions combined with added SETs in this function. */
104
105 static int combine_extras;
106
107 /* Number of instructions combined in this function. */
108
109 static int combine_successes;
110
111 /* Totals over entire compilation. */
112
113 static int total_attempts, total_merges, total_extras, total_successes;
114 \f
115 /* Vector mapping INSN_UIDs to cuids.
116 The cuids are like uids but always increase monotonically.
117 Combine always uses cuids so that it can compare them.
118 But actually renumbering the uids, which we used to do,
119 proves to be a bad idea because it makes it hard to compare
120 the dumps produced by earlier passes with those from later passes. */
121
122 static int *uid_cuid;
123
124 /* Get the cuid of an insn. */
125
126 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
127
128 /* Maximum register number, which is the size of the tables below. */
129
130 static int combine_max_regno;
131
132 /* Record last point of death of (hard or pseudo) register n. */
133
134 static rtx *reg_last_death;
135
136 /* Record last point of modification of (hard or pseudo) register n. */
137
138 static rtx *reg_last_set;
139
140 /* Record the cuid of the last insn that invalidated memory
141 (anything that writes memory, and subroutine calls, but not pushes). */
142
143 static int mem_last_set;
144
145 /* Record the cuid of the last CALL_INSN
146 so we can tell whether a potential combination crosses any calls. */
147
148 static int last_call_cuid;
149
150 /* When `subst' is called, this is the insn that is being modified
151 (by combining in a previous insn). The PATTERN of this insn
152 is still the old pattern partially modified and it should not be
153 looked at, but this may be used to examine the successors of the insn
154 to judge whether a simplification is valid. */
155
156 static rtx subst_insn;
157
158 /* This is the lowest CUID that `subst' is currently dealing with.
159 get_last_value will not return a value if the register was set at or
160 after this CUID. If not for this mechanism, we could get confused if
161 I2 or I1 in try_combine were an insn that used the old value of a register
162 to obtain a new value. In that case, we might erroneously get the
163 new value of the register when we wanted the old one. */
164
165 static int subst_low_cuid;
166
167 /* This is the value of undobuf.num_undo when we started processing this
168 substitution. This will prevent gen_rtx_combine from re-using a piece
169 from the previous expression. Doing so can produce circular rtl
170 structures. */
171
172 static int previous_num_undos;
173 \f
174 /* The next group of arrays allows the recording of the last value assigned
175 to (hard or pseudo) register n. We use this information to see if an
176 operation being processed is redundant given a prior operation performed
177 on the register. For example, an `and' with a constant is redundant if
178 all the zero bits are already known to be turned off.
179
180 We use an approach similar to that used by cse, but change it in the
181 following ways:
182
183 (1) We do not want to reinitialize at each label.
184 (2) It is useful, but not critical, to know the actual value assigned
185 to a register. Often just its form is helpful.
186
187 Therefore, we maintain the following arrays:
188
189 reg_last_set_value the last value assigned
190 reg_last_set_label records the value of label_tick when the
191 register was assigned
192 reg_last_set_table_tick records the value of label_tick when a
193 value using the register is assigned
194 reg_last_set_invalid set to non-zero when it is not valid
195 to use the value of this register in some
196 register's value
197
198 To understand the usage of these tables, it is important to understand
199 the distinction between the value in reg_last_set_value being valid
200 and the register being validly contained in some other expression in the
201 table.
202
203 Entry I in reg_last_set_value is valid if it is non-zero, and either
204 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
205
206 Register I may validly appear in any expression returned for the value
207 of another register if reg_n_sets[i] is 1. It may also appear in the
208 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
209 reg_last_set_invalid[j] is zero.
210
211 If an expression is found in the table containing a register which may
212 not validly appear in an expression, the register is replaced by
213 something that won't match, (clobber (const_int 0)).
214
215 reg_last_set_invalid[i] is set non-zero when register I is being assigned
216 to and reg_last_set_table_tick[i] == label_tick. */
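/* As a hypothetical illustration of the rules above: if reg 65 was
   last assigned while label_tick was 3, reg_last_set_value[65] remains
   usable while label_tick is still 3, or at any time if
   reg_n_sets[65] == 1. Should reg 65 be assigned again in the same
   tick, an expression in the table that mentions it is invalidated by
   replacing the reference with (clobber (const_int 0)), which can
   never match.  */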
217
218 /* Record last value assigned to (hard or pseudo) register n. */
219
220 static rtx *reg_last_set_value;
221
222 /* Record the value of label_tick when the value for register n is placed in
223 reg_last_set_value[n]. */
224
225 static short *reg_last_set_label;
226
227 /* Record the value of label_tick when an expression involving register n
228 is placed in reg_last_set_value. */
229
230 static short *reg_last_set_table_tick;
231
232 /* Set non-zero if references to register n in expressions should not be
233 used. */
234
235 static char *reg_last_set_invalid;
236
237 /* Incremented for each label. */
238
239 static short label_tick;
240
241 /* Some registers that are set more than once and used in more than one
242 basic block are nevertheless always set in similar ways. For example,
243 a QImode register may be loaded from memory in two places on a machine
244 where byte loads zero extend.
245
246 We record in the following array what we know about the significant
247 bits of a register, specifically which bits are known to be zero.
248
249 If an entry is zero, it means that we don't know anything special. */
250
251 static HOST_WIDE_INT *reg_significant;
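
/* For instance (an illustrative sketch): on a machine whose byte loads
   zero extend, a pseudo loaded in two basic blocks by insns of the form
       (set (reg:SI 70) (zero_extend:SI (mem:QI ...)))
   ends up with reg_significant[70] == 0xff, so a later
   (and:SI (reg:SI 70) (const_int 255)) can be simplified away.  */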
252
253 /* Mode used to compute significance in reg_significant. It is the largest
254 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
255
256 static enum machine_mode significant_mode;
257
258 /* Nonzero if we know that a register has some leading bits that are always
259 equal to the sign bit. */
260
261 static char *reg_sign_bit_copies;
262
263 /* Nonzero when reg_significant and reg_sign_bit_copies can be safely used.
264 It is zero while computing them and after combine has completed. The
265 former condition prevents propagating values based on previously set values,
266 which can be incorrect if a variable is modified in a loop. */
267
268 static int significant_valid;
269 \f
270 /* Record one modification to rtl structure
271 to be undone by storing old_contents into *where.
272 is_int is 1 if the contents are an int. */
273
274 struct undo
275 {
276 int is_int;
277 union {rtx rtx; int i;} old_contents;
278 union {rtx *rtx; int *i;} where;
279 };
280
281 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
282 num_undo says how many are currently recorded.
283
284 storage is nonzero if we must undo the allocation of new storage.
285 The value of storage is what to pass to obfree.
286
287 other_insn is nonzero if we have modified some other insn in the process
288 of working on subst_insn. It must be verified too. */
289
290 #define MAX_UNDO 50
291
292 struct undobuf
293 {
294 int num_undo;
295 char *storage;
296 struct undo undo[MAX_UNDO];
297 rtx other_insn;
298 };
299
300 static struct undobuf undobuf;
301
302 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
303 insn. The substitution can be undone by undo_all. If INTO is already
304 set to NEWVAL, do not record this change. Because computing NEWVAL might
305 also call SUBST, we have to compute it before we put anything into
306 the undo table. */
307
308 #define SUBST(INTO, NEWVAL) \
309 do { rtx _new = (NEWVAL); \
310 if (undobuf.num_undo < MAX_UNDO) \
311 { \
312 undobuf.undo[undobuf.num_undo].is_int = 0; \
313 undobuf.undo[undobuf.num_undo].where.rtx = &INTO; \
314 undobuf.undo[undobuf.num_undo].old_contents.rtx = INTO; \
315 INTO = _new; \
316 if (undobuf.undo[undobuf.num_undo].old_contents.rtx != INTO) \
317 undobuf.num_undo++; \
318 } \
319 } while (0)
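/* A typical use (hypothetical): replace the source of a SET so that
   undo_all can back the change out if the result is not recognized:

       SUBST (SET_SRC (x), new_src);

   Computing NEWVAL before anything is recorded is what makes nested
   substitutions safe, as noted above.  */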
320
321 /* Similar to SUBST, but NEWVAL is an int. INTO will normally be an XINT
322 expression.
323 Note that substitution for the value of a CONST_INT is not safe. */
324
325 #define SUBST_INT(INTO, NEWVAL) \
326 do { if (undobuf.num_undo < MAX_UNDO) \
327 { \
328 undobuf.undo[undobuf.num_undo].is_int = 1; \
329 undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
330 undobuf.undo[undobuf.num_undo].old_contents.i = INTO; \
331 INTO = NEWVAL; \
332 if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
333 undobuf.num_undo++; \
334 } \
335 } while (0)
336
337 /* Number of times the pseudo being substituted for
338 was found and replaced. */
339
340 static int n_occurrences;
341
342 static void set_significant ();
343 static void move_deaths ();
344 rtx remove_death ();
345 static void record_value_for_reg ();
346 static void record_dead_and_set_regs ();
347 static int use_crosses_set_p ();
348 static rtx try_combine ();
349 static rtx *find_split_point ();
350 static rtx subst ();
351 static void undo_all ();
352 static int reg_dead_at_p ();
353 static rtx expand_compound_operation ();
354 static rtx expand_field_assignment ();
355 static rtx make_extraction ();
356 static int get_pos_from_mask ();
357 static rtx force_to_mode ();
358 static rtx known_cond ();
359 static rtx make_field_assignment ();
360 static rtx make_compound_operation ();
361 static rtx apply_distributive_law ();
362 static rtx simplify_and_const_int ();
363 static unsigned HOST_WIDE_INT significant_bits ();
364 static int num_sign_bit_copies ();
365 static int merge_outer_ops ();
366 static rtx simplify_shift_const ();
367 static int recog_for_combine ();
368 static rtx gen_lowpart_for_combine ();
369 static rtx gen_rtx_combine ();
370 static rtx gen_binary ();
371 static rtx gen_unary ();
372 static enum rtx_code simplify_comparison ();
373 static int reversible_comparison_p ();
374 static int get_last_value_validate ();
375 static rtx get_last_value ();
376 static void distribute_notes ();
377 static void distribute_links ();
378 \f
379 /* Main entry point for combiner. F is the first insn of the function.
380 NREGS is the first unused pseudo-reg number. */
381
382 void
383 combine_instructions (f, nregs)
384 rtx f;
385 int nregs;
386 {
387 register rtx insn, next, prev;
388 register int i;
389 register rtx links, nextlinks;
390
391 combine_attempts = 0;
392 combine_merges = 0;
393 combine_extras = 0;
394 combine_successes = 0;
395
396 combine_max_regno = nregs;
397
398 reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
399 reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
400 reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
401 reg_last_set_table_tick = (short *) alloca (nregs * sizeof (short));
402 reg_last_set_label = (short *) alloca (nregs * sizeof (short));
403 reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
404 reg_significant = (HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
405 reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));
406
407 bzero (reg_last_death, nregs * sizeof (rtx));
408 bzero (reg_last_set, nregs * sizeof (rtx));
409 bzero (reg_last_set_value, nregs * sizeof (rtx));
410 bzero (reg_last_set_table_tick, nregs * sizeof (short));
411 bzero (reg_last_set_invalid, nregs * sizeof (char));
412 bzero (reg_significant, nregs * sizeof (HOST_WIDE_INT));
413 bzero (reg_sign_bit_copies, nregs * sizeof (char));
414
415 init_recog_no_volatile ();
416
417 /* Compute maximum uid value so uid_cuid can be allocated. */
418
419 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
420 if (INSN_UID (insn) > i)
421 i = INSN_UID (insn);
422
423 uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
424
425 significant_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
426
427 /* Don't use reg_significant when computing it. This can cause problems
428 when, for example, we have j <<= 1 in a loop. */
429
430 significant_valid = 0;
431
432 /* Compute the mapping from uids to cuids.
433 Cuids are numbers assigned to insns, like uids,
434 except that cuids increase monotonically through the code.
435
436 Scan all SETs and see if we can deduce anything about what
437 bits are significant for some registers. */
438
439 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
440 {
441 INSN_CUID (insn) = ++i;
442 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
443 note_stores (PATTERN (insn), set_significant);
444 }
445
446 significant_valid = 1;
447
448 /* Now scan all the insns in forward order. */
449
450 label_tick = 1;
451 last_call_cuid = 0;
452 mem_last_set = 0;
453
454 for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
455 {
456 next = 0;
457
458 if (GET_CODE (insn) == CODE_LABEL)
459 label_tick++;
460
461 else if (GET_CODE (insn) == INSN
462 || GET_CODE (insn) == CALL_INSN
463 || GET_CODE (insn) == JUMP_INSN)
464 {
465 /* Try this insn with each insn it links back to. */
466
467 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
468 if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
469 goto retry;
470
471 /* Try each sequence of three linked insns ending with this one. */
472
473 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
474 for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
475 nextlinks = XEXP (nextlinks, 1))
476 if ((next = try_combine (insn, XEXP (links, 0),
477 XEXP (nextlinks, 0))) != 0)
478 goto retry;
479
480 #ifdef HAVE_cc0
481 /* Try to combine a jump insn that uses CC0
482 with a preceding insn that sets CC0, and maybe with its
483 logical predecessor as well.
484 This is how we make decrement-and-branch insns.
485 We need this special code because data flow connections
486 via CC0 do not get entered in LOG_LINKS. */
487
488 if (GET_CODE (insn) == JUMP_INSN
489 && (prev = prev_nonnote_insn (insn)) != 0
490 && GET_CODE (prev) == INSN
491 && sets_cc0_p (PATTERN (prev)))
492 {
493 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
494 goto retry;
495
496 for (nextlinks = LOG_LINKS (prev); nextlinks;
497 nextlinks = XEXP (nextlinks, 1))
498 if ((next = try_combine (insn, prev,
499 XEXP (nextlinks, 0))) != 0)
500 goto retry;
501 }
502
503 /* Do the same for an insn that explicitly references CC0. */
504 if (GET_CODE (insn) == INSN
505 && (prev = prev_nonnote_insn (insn)) != 0
506 && GET_CODE (prev) == INSN
507 && sets_cc0_p (PATTERN (prev))
508 && GET_CODE (PATTERN (insn)) == SET
509 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
510 {
511 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
512 goto retry;
513
514 for (nextlinks = LOG_LINKS (prev); nextlinks;
515 nextlinks = XEXP (nextlinks, 1))
516 if ((next = try_combine (insn, prev,
517 XEXP (nextlinks, 0))) != 0)
518 goto retry;
519 }
520
521 /* Finally, see if any of the insns that this insn links to
522 explicitly references CC0. If so, try this insn, that insn,
523 and its predecessor if it sets CC0. */
524 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
525 if (GET_CODE (XEXP (links, 0)) == INSN
526 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
527 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
528 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
529 && GET_CODE (prev) == INSN
530 && sets_cc0_p (PATTERN (prev))
531 && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
532 goto retry;
533 #endif
534
535 /* Try combining an insn with two different insns whose results it
536 uses. */
537 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
538 for (nextlinks = XEXP (links, 1); nextlinks;
539 nextlinks = XEXP (nextlinks, 1))
540 if ((next = try_combine (insn, XEXP (links, 0),
541 XEXP (nextlinks, 0))) != 0)
542 goto retry;
543
544 if (GET_CODE (insn) != NOTE)
545 record_dead_and_set_regs (insn);
546
547 retry:
548 ;
549 }
550 }
551
552 total_attempts += combine_attempts;
553 total_merges += combine_merges;
554 total_extras += combine_extras;
555 total_successes += combine_successes;
556
557 significant_valid = 0;
558 }
559 \f
560 /* Called via note_stores. If X is a pseudo that is used in more than
561 one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
562 set, record what bits are significant. If we are clobbering X,
563 ignore this "set" because the clobbered value won't be used.
564
565 If we are setting only a portion of X and we can't figure out what
566 portion, assume all bits will be used since we don't know what will
567 be happening.
568
569 Similarly, set how many bits of X are known to be copies of the sign bit
570 at all locations in the function. This is the smallest number implied
571 by any set of X. */
572
573 static void
574 set_significant (x, set)
575 rtx x;
576 rtx set;
577 {
578 int num;
579
580 if (GET_CODE (x) == REG
581 && REGNO (x) >= FIRST_PSEUDO_REGISTER
582 && reg_n_sets[REGNO (x)] > 1
583 && reg_basic_block[REGNO (x)] < 0
584 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
585 {
586 if (GET_CODE (set) == CLOBBER)
587 return;
588
589 /* If this is a complex assignment, see if we can convert it into a
590 simple assignment. */
591 set = expand_field_assignment (set);
592 if (SET_DEST (set) == x)
593 {
594 reg_significant[REGNO (x)]
595 |= significant_bits (SET_SRC (set), significant_mode);
596 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
597 if (reg_sign_bit_copies[REGNO (x)] == 0
598 || reg_sign_bit_copies[REGNO (x)] > num)
599 reg_sign_bit_copies[REGNO (x)] = num;
600 }
601 else
602 {
603 reg_significant[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
604 reg_sign_bit_copies[REGNO (x)] = 0;
605 }
606 }
607 }
608 \f
609 /* See if INSN can be combined into I3. PRED and SUCC are optional
610 insns that were previously combined into I3 or that will be combined
611 into the merger of INSN and I3.
612
613 Return 0 if the combination is not allowed for any reason.
614
615 If the combination is allowed, *PDEST will be set to the single
616 destination of INSN and *PSRC to the single source, and this function
617 will return 1. */
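/* For example, try_combine below validates I2 along these lines
   (a sketch of the actual call):

       rtx i2dest, i2src;
       if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src))
         ... reject the combination ...

   On success, i2dest and i2src hold I2's single SET_DEST and
   SET_SRC.  */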
618
619 static int
620 can_combine_p (insn, i3, pred, succ, pdest, psrc)
621 rtx insn;
622 rtx i3;
623 rtx pred, succ;
624 rtx *pdest, *psrc;
625 {
626 int i;
627 rtx set = 0, src, dest;
628 rtx p, link;
629 int all_adjacent = (succ ? (next_active_insn (insn) == succ
630 && next_active_insn (succ) == i3)
631 : next_active_insn (insn) == i3);
632
633 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
634 or a PARALLEL consisting of such a SET and CLOBBERs.
635
636 If INSN has CLOBBER parallel parts, ignore them for our processing.
637 By definition, these happen during the execution of the insn. When it
638 is merged with another insn, all bets are off. If they are, in fact,
639 needed and aren't also supplied in I3, they may be added by
640 recog_for_combine. Otherwise, it won't match.
641
642 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
643 note.
644
645 Get the source and destination of INSN. If more than one, can't
646 combine. */
647
648 if (GET_CODE (PATTERN (insn)) == SET)
649 set = PATTERN (insn);
650 else if (GET_CODE (PATTERN (insn)) == PARALLEL
651 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
652 {
653 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
654 {
655 rtx elt = XVECEXP (PATTERN (insn), 0, i);
656
657 switch (GET_CODE (elt))
658 {
659 /* We can ignore CLOBBERs. */
660 case CLOBBER:
661 break;
662
663 case SET:
664 /* Ignore SETs whose result isn't used but not those that
665 have side-effects. */
666 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
667 && ! side_effects_p (elt))
668 break;
669
670 /* If we have already found a SET, this is a second one and
671 so we cannot combine with this insn. */
672 if (set)
673 return 0;
674
675 set = elt;
676 break;
677
678 default:
679 /* Anything else means we can't combine. */
680 return 0;
681 }
682 }
683
684 if (set == 0
685 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
686 so don't do anything with it. */
687 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
688 return 0;
689 }
690 else
691 return 0;
692
693 if (set == 0)
694 return 0;
695
696 set = expand_field_assignment (set);
697 src = SET_SRC (set), dest = SET_DEST (set);
698
699 /* Don't eliminate a store in the stack pointer. */
700 if (dest == stack_pointer_rtx
701 /* Don't install a subreg involving two modes not tieable.
702 It can worsen register allocation, and can even make invalid reload
703 insns, since the reg inside may need to be copied from in the
704 outside mode, and that may be invalid if it is an fp reg copied in
705 integer mode. As a special exception, we can allow this if
706 I3 is simply copying DEST, a REG, to CC0. */
707 || (GET_CODE (src) == SUBREG
708 && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
709 #ifdef HAVE_cc0
710 && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
711 && SET_DEST (PATTERN (i3)) == cc0_rtx
712 && GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
713 #endif
714 )
715 /* If we couldn't eliminate a field assignment, we can't combine. */
716 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
717 /* Don't combine with an insn that sets a register to itself if it has
718 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
719 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
720 /* Can't merge a function call. */
721 || GET_CODE (src) == CALL
722 /* Don't substitute into an incremented register. */
723 || FIND_REG_INC_NOTE (i3, dest)
724 || (succ && FIND_REG_INC_NOTE (succ, dest))
725 /* Don't combine the end of a libcall into anything. */
726 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
727 /* Make sure that DEST is not used after SUCC but before I3. */
728 || (succ && ! all_adjacent
729 && reg_used_between_p (dest, succ, i3))
730 /* Make sure that the value that is to be substituted for the register
731 does not use any registers whose values alter in between. However,
732 if the insns are adjacent, a use can't cross a set even though we
733 think it might (this can happen for a sequence of insns each setting
734 the same destination; reg_last_set of that register might point to
735 a NOTE). Also, don't move a volatile asm across any other insns. */
736 || (! all_adjacent
737 && (use_crosses_set_p (src, INSN_CUID (insn))
738 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))))
739 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
740 better register allocation by not doing the combine. */
741 || find_reg_note (i3, REG_NO_CONFLICT, dest)
742 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
743 /* Don't combine across a CALL_INSN, because that would possibly
744 change whether the life span of some REGs crosses calls or not,
745 and it is a pain to update that information.
746 Exception: if source is a constant, moving it later can't hurt.
747 Accept that special case, because it helps -fforce-addr a lot. */
748 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
749 return 0;
750
751 /* DEST must either be a REG or CC0. */
752 if (GET_CODE (dest) == REG)
753 {
754 /* If register alignment is being enforced for multi-word items in all
755 cases except for parameters, it is possible to have a register copy
756 insn referencing a hard register that is not allowed to contain the
757 mode being copied and which would not be valid as an operand of most
758 insns. Eliminate this problem by not combining with such an insn.
759
760 Also, on some machines we don't want to extend the life of a hard
761 register. */
762
763 if (GET_CODE (src) == REG
764 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
765 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
766 #ifdef SMALL_REGISTER_CLASSES
767 /* Don't extend the life of a hard register. */
768 || REGNO (src) < FIRST_PSEUDO_REGISTER
769 #else
770 || (REGNO (src) < FIRST_PSEUDO_REGISTER
771 && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
772 #endif
773 ))
774 return 0;
775 }
776 else if (GET_CODE (dest) != CC0)
777 return 0;
778
779 /* Don't substitute for a register intended as a clobberable operand. */
780 if (GET_CODE (PATTERN (i3)) == PARALLEL)
781 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
782 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
783 && rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest))
784 return 0;
785
786 /* If INSN contains anything volatile, or is an `asm' (whether volatile
787 or not), reject, unless nothing volatile comes between it and I3,
788 with the exception of SUCC. */
789
790 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
791 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
792 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
793 && p != succ && volatile_refs_p (PATTERN (p)))
794 return 0;
795
796 /* If INSN or I2 contains an autoincrement or autodecrement,
797 make sure that register is not used between there and I3,
798 and not already used in I3 either.
799 Also insist that I3 not be a jump; if it were one
800 and the incremented register were spilled, we would lose. */
801
802 #ifdef AUTO_INC_DEC
803 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
804 if (REG_NOTE_KIND (link) == REG_INC
805 && (GET_CODE (i3) == JUMP_INSN
806 || reg_used_between_p (XEXP (link, 0), insn, i3)
807 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
808 return 0;
809 #endif
810
811 #ifdef HAVE_cc0
812 /* Don't combine an insn that follows a CC0-setting insn.
813 An insn that uses CC0 must not be separated from the one that sets it.
814 We do, however, allow I2 to follow a CC0-setting insn if that insn
815 is passed as I1; in that case it will be deleted also.
816 We also allow combining in this case if all the insns are adjacent
817 because that would leave the two CC0 insns adjacent as well.
818 It would be more logical to test whether CC0 occurs inside I1 or I2,
819 but that would be much slower, and this ought to be equivalent. */
820
821 p = prev_nonnote_insn (insn);
822 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
823 && ! all_adjacent)
824 return 0;
825 #endif
826
827 /* If we get here, we have passed all the tests and the combination is
828 to be allowed. */
829
830 *pdest = dest;
831 *psrc = src;
832
833 return 1;
834 }
835 \f
836 /* LOC is the location within I3 that contains its pattern or the component
837 of a PARALLEL of the pattern. We validate that it is valid for combining.
838
839 One problem is that if I3 modifies its output, as opposed to replacing it
840 entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
841 so would produce an insn that is not equivalent to the original insns.
842
843 Consider:
844
845 (set (reg:DI 101) (reg:DI 100))
846 (set (subreg:SI (reg:DI 101) 0) <foo>)
847
848 This is NOT equivalent to:
849
850 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
851 (set (reg:DI 101) (reg:DI 100))])
852
853 Not only does this modify 100 (in which case it might still be valid
854 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
855
856 We can also run into a problem if I2 sets a register that I1
857 uses and I1 gets directly substituted into I3 (not via I2). In that
858 case, we would be getting the wrong value of I2DEST into I3, so we
859 must reject the combination. This case occurs when I2 and I1 both
860 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
861 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
862 of a SET must prevent combination from occurring.
863
864 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
865 if the destination of a SET is a hard register.
866
867 Before doing the above check, we first try to expand a field assignment
868 into a set of logical operations.
869
870 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
871 we place a register that is both set and used within I3. If more than one
872 such register is detected, we fail.
873
874 Return 1 if the combination is valid, zero otherwise. */
875
876 static int
877 combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
878 rtx i3;
879 rtx *loc;
880 rtx i2dest;
881 rtx i1dest;
882 int i1_not_in_src;
883 rtx *pi3dest_killed;
884 {
885 rtx x = *loc;
886
887 if (GET_CODE (x) == SET)
888 {
889 rtx set = expand_field_assignment (x);
890 rtx dest = SET_DEST (set);
891 rtx src = SET_SRC (set);
892 rtx inner_dest = dest, inner_src = src;
893
894 SUBST (*loc, set);
895
896 while (GET_CODE (inner_dest) == STRICT_LOW_PART
897 || GET_CODE (inner_dest) == SUBREG
898 || GET_CODE (inner_dest) == ZERO_EXTRACT)
899 inner_dest = XEXP (inner_dest, 0);
900
901 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
902 was added. */
903 #if 0
904 while (GET_CODE (inner_src) == STRICT_LOW_PART
905 || GET_CODE (inner_src) == SUBREG
906 || GET_CODE (inner_src) == ZERO_EXTRACT)
907 inner_src = XEXP (inner_src, 0);
908
909 /* If it is better that two different modes keep two different pseudos,
910 avoid combining them. This avoids producing the following pattern
911 on a 386:
912 (set (subreg:SI (reg/v:QI 21) 0)
913 (lshiftrt:SI (reg/v:SI 20)
914 (const_int 24)))
915 If that were made, reload could not handle the pair of
916 reg 20/21, since it would try to get any GENERAL_REGS
917 but some of them don't handle QImode. */
918
919 if (rtx_equal_p (inner_src, i2dest)
920 && GET_CODE (inner_dest) == REG
921 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
922 return 0;
923 #endif
924
925 /* Check for the case where I3 modifies its output, as
926 discussed above. */
927 if ((inner_dest != dest
928 && (reg_overlap_mentioned_p (i2dest, inner_dest)
929 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
930 /* This is the same test done in can_combine_p except that we
931 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
932 CALL operation. */
933 || (GET_CODE (inner_dest) == REG
934 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
935 #ifdef SMALL_REGISTER_CLASSES
936 && GET_CODE (src) != CALL
937 #else
938 && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
939 GET_MODE (inner_dest))
940 #endif
941 )
942
943 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
944 return 0;
945
946 /* If DEST is used in I3, it is being killed in this insn,
947 so record that for later. */
948 if (pi3dest_killed && GET_CODE (dest) == REG
949 && reg_referenced_p (dest, PATTERN (i3)))
950 {
951 if (*pi3dest_killed)
952 return 0;
953
954 *pi3dest_killed = dest;
955 }
956 }
957
958 else if (GET_CODE (x) == PARALLEL)
959 {
960 int i;
961
962 for (i = 0; i < XVECLEN (x, 0); i++)
963 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
964 i1_not_in_src, pi3dest_killed))
965 return 0;
966 }
967
968 return 1;
969 }
970 \f
971 /* Try to combine the insns I1 and I2 into I3.
972 Here I1 and I2 appear earlier than I3.
973 I1 can be zero; then we combine just I2 into I3.
974
975 If we are combining three insns and the resulting insn is not recognized,
976 try splitting it into two insns. If that happens, I2 and I3 are retained
977 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
978 are pseudo-deleted.
979
980 If we created two insns, return I2; otherwise return I3.
981 Return 0 if the combination does not work. Then nothing is changed. */
982
983 static rtx
984 try_combine (i3, i2, i1)
985 register rtx i3, i2, i1;
986 {
987 /* New patterns for I3 and I2, respectively. */
988 rtx newpat, newi2pat = 0;
989 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
990 int added_sets_1, added_sets_2;
991 /* Total number of SETs to put into I3. */
992 int total_sets;
993 /* Nonzero if I2's body now appears in I3. */
994 int i2_is_used;
995 /* INSN_CODEs for new I3, new I2, and user of condition code. */
996 int insn_code_number, i2_code_number, other_code_number;
997 /* Contains I3 if the destination of I3 is used in its source, which means
998 that the old life of I3 is being killed. If that usage is placed into
999 I2 and not in I3, a REG_DEAD note must be made. */
1000 rtx i3dest_killed = 0;
1001 /* SET_DEST and SET_SRC of I2 and I1. */
1002 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1003 /* PATTERN (I2), or a copy of it in certain cases. */
1004 rtx i2pat;
1005 /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC. */
1006 int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1007 int i1_feeds_i3 = 0;
1008 /* Notes that must be added to REG_NOTES in I3 and I2. */
1009 rtx new_i3_notes, new_i2_notes;
1010
1011 int maxreg;
1012 rtx temp;
1013 register rtx link;
1014 int i;
1015
1016 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1017 This can occur when flow deletes an insn that it has merged into an
1018 auto-increment address. We also can't do anything if I3 has a
1019 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1020 libcall. */
1021
1022 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1023 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1024 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
1025 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
1026 return 0;
1027
1028 combine_attempts++;
1029
1030 undobuf.num_undo = previous_num_undos = 0;
1031 undobuf.other_insn = 0;
1032
1033 /* Save the current high-water-mark so we can free storage if we didn't
1034 accept this combination. */
1035 undobuf.storage = (char *) oballoc (0);
1036
1037 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1038 code below, set I1 to be the earlier of the two insns. */
1039 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1040 temp = i1, i1 = i2, i2 = temp;
1041
1042 /* First check for one important special case that the code below will
1043 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1044 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1045 we may be able to replace that destination with the destination of I3.
1046 This occurs in the common code where we compute both a quotient and
1047 remainder into a structure, in which case we want to do the computation
1048 directly into the structure to avoid register-register copies.
1049
1050 We make very conservative checks below and only try to handle the
1051 most common cases of this. For example, we only handle the case
1052 where I2 and I3 are adjacent to avoid making difficult register
1053 usage tests. */
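/* Illustrative shape of that case (register numbers invented):

       I2: (parallel [(set (reg:SI 60) (div:SI (reg:SI 58) (reg:SI 59)))
                      (set (reg:SI 61) (mod:SI (reg:SI 58) (reg:SI 59)))])
       I3: (set (mem:SI ...) (reg:SI 60))

   Here we try to rewrite I2 to compute its quotient directly into
   I3's destination.  */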
1054
1055 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1056 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1057 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1058 #ifdef SMALL_REGISTER_CLASSES
1059 && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1060 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
1061 #endif
1062 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1063 && GET_CODE (PATTERN (i2)) == PARALLEL
1064 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1065 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1066 below would need to check what is inside (and reg_overlap_mentioned_p
1067 doesn't support those codes anyway). Don't allow those destinations;
1068 the resulting insn isn't likely to be recognized anyway. */
1069 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1070 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1071 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1072 SET_DEST (PATTERN (i3)))
1073 && next_real_insn (i2) == i3)
1074 {
1075 rtx p2 = PATTERN (i2);
1076
1077 /* Make sure that the destination of I3,
1078 which we are going to substitute into one output of I2,
1079 is not used within another output of I2. We must avoid making this:
1080 (parallel [(set (mem (reg 69)) ...)
1081 (set (reg 69) ...)])
1082 which is not well-defined as to order of actions.
1083 (Besides, reload can't handle output reloads for this.)
1084
1085 The problem can also happen if the dest of I3 is a memory ref,
1086 if another dest in I2 is an indirect memory ref. */
1087 for (i = 0; i < XVECLEN (p2, 0); i++)
1088 if (GET_CODE (XVECEXP (p2, 0, i)) == SET
1089 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1090 SET_DEST (XVECEXP (p2, 0, i))))
1091 break;
1092
1093 if (i == XVECLEN (p2, 0))
1094 for (i = 0; i < XVECLEN (p2, 0); i++)
1095 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1096 {
1097 combine_merges++;
1098
1099 subst_insn = i3;
1100 subst_low_cuid = INSN_CUID (i2);
1101
1102 added_sets_2 = 0;
1103 i2dest = SET_SRC (PATTERN (i3));
1104
1105 /* Replace the dest in I2 with our dest and make the resulting
1106 insn the new pattern for I3. Then skip to where we
1107 validate the pattern. Everything was set up above. */
1108 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1109 SET_DEST (PATTERN (i3)));
1110
1111 newpat = p2;
1112 goto validate_replacement;
1113 }
1114 }
1115
1116 #ifndef HAVE_cc0
1117 /* If we have no I1 and I2 looks like:
1118 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1119 (set Y OP)])
1120 make up a dummy I1 that is
1121 (set Y OP)
1122 and change I2 to be
1123 (set (reg:CC X) (compare:CC Y (const_int 0)))
1124
1125 (We can ignore any trailing CLOBBERs.)
1126
1127 This undoes a previous combination and allows us to match a branch-and-
1128 decrement insn. */
1129
1130 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1131 && XVECLEN (PATTERN (i2), 0) >= 2
1132 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1133 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1134 == MODE_CC)
1135 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1136 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1137 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1138 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1139 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1140 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1141 {
1142 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1143 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1144 break;
1145
1146 if (i == 1)
1147 {
1148 /* We make I1 with the same INSN_UID as I2. This gives it
1149 the same INSN_CUID for value tracking. Our fake I1 will
1150 never appear in the insn stream so giving it the same INSN_UID
1151 as I2 will not cause a problem. */
1152
1153 i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1154 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
1155
1156 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1157 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1158 SET_DEST (PATTERN (i1)));
1159 }
1160 }
1161 #endif
1162
1163 /* Verify that I2 and I1 are valid for combining. */
1164 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1165 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1166 {
1167 undo_all ();
1168 return 0;
1169 }
1170
1171 /* Record whether I2DEST is used in I2SRC and similarly for the other
1172 cases. Knowing this will help in register status updating below. */
1173 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1174 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1175 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1176
1177 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1178 in I2SRC. */
1179 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1180
1181 /* Ensure that I3's pattern can be the destination of combines. */
1182 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1183 i1 && i2dest_in_i1src && i1_feeds_i3,
1184 &i3dest_killed))
1185 {
1186 undo_all ();
1187 return 0;
1188 }
1189
1190 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1191 We used to do this EXCEPT in one case: I3 has a post-inc in an
1192 output operand. However, that exception can give rise to insns like
1193 mov r3,(r3)+
1194 which is a famous insn on the PDP-11 where the value of r3 used as the
1195 source was model-dependent. Avoid this sort of thing. */
1196
1197 #if 0
1198 if (!(GET_CODE (PATTERN (i3)) == SET
1199 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1200 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1201 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1202 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1203 /* It's not the exception. */
1204 #endif
1205 #ifdef AUTO_INC_DEC
1206 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1207 if (REG_NOTE_KIND (link) == REG_INC
1208 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1209 || (i1 != 0
1210 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1211 {
1212 undo_all ();
1213 return 0;
1214 }
1215 #endif
1216
1217 /* See if the SETs in I1 or I2 need to be kept around in the merged
1218 instruction: whenever the value set there is still needed past I3.
1219 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1220
1221 For the SET in I1, we have two cases: If I1 and I2 independently
1222 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1223 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1224 in I1 needs to be kept around unless I1DEST dies or is set in either
1225 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1226 I1DEST. If so, we know I1 feeds into I2. */
1227
1228 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1229
1230 added_sets_1
1231 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1232 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1233
1234 /* If the set in I2 needs to be kept around, we must make a copy of
1235 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1236 PATTERN (I2), we are only substituting for the original I1DEST, not into
1237 an already-substituted copy. This also prevents making self-referential
1238 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1239 I2DEST. */
1240
1241 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1242 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1243 : PATTERN (i2));
1244
1245 if (added_sets_2)
1246 i2pat = copy_rtx (i2pat);
1247
1248 combine_merges++;
1249
1250 /* Substitute in the latest insn for the regs set by the earlier ones. */
1251
1252 maxreg = max_reg_num ();
1253
1254 subst_insn = i3;
1255
1256 /* It is possible that the source of I2 or I1 may be performing an
1257 unneeded operation, such as a ZERO_EXTEND of something that is known
1258 to have the high part zero. Handle that case by letting subst look at
1259 the innermost one of them.
1260
1261 Another way to do this would be to have a function that tries to
1262 simplify a single insn instead of merging two or more insns. We don't
1263 do this because of the potential of infinite loops and because
1264 of the potential extra memory required. However, doing it the way
1265 we are is a bit of a kludge and doesn't catch all cases.
1266
1267 But only do this if -fexpensive-optimizations since it slows things down
1268 and doesn't usually win. */
1269
1270 if (flag_expensive_optimizations)
1271 {
1272 /* Pass pc_rtx so no substitutions are done, just simplifications.
1273 The cases that we are interested in here do not involve the few
1274 cases where is_replaced is checked. */
1275 if (i1)
1276 {
1277 subst_low_cuid = INSN_CUID (i1);
1278 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1279 }
1280 else
1281 {
1282 subst_low_cuid = INSN_CUID (i2);
1283 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1284 }
1285
1286 previous_num_undos = undobuf.num_undo;
1287 }
1288
1289 #ifndef HAVE_cc0
1290 /* Many machines that don't use CC0 have insns that can both perform an
1291 arithmetic operation and set the condition code. These operations will
1292 be represented as a PARALLEL with the first element of the vector
1293 being a COMPARE of an arithmetic operation with the constant zero.
1294 The second element of the vector will set some pseudo to the result
1295 of the same arithmetic operation. If we simplify the COMPARE, we won't
1296 match such a pattern and so will generate an extra insn. Here we test
1297 for this case, where both the comparison and the operation result are
1298 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1299 I2SRC. Later we will make the PARALLEL that contains I2. */
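/* Illustrative shape of such a PARALLEL (register numbers invented):

       (parallel [(set (reg:CC 24)
                       (compare:CC (plus:SI (reg:SI 60) (reg:SI 61))
                                   (const_int 0)))
                  (set (reg:SI 62)
                       (plus:SI (reg:SI 60) (reg:SI 61)))])  */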
1300
1301 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1302 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1303 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1304 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1305 {
1306 rtx *cc_use;
1307 enum machine_mode compare_mode;
1308
1309 newpat = PATTERN (i3);
1310 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1311
1312 i2_is_used = 1;
1313
1314 #ifdef EXTRA_CC_MODES
1315 /* See if a COMPARE with the operand we substituted in should be done
1316 with the mode that is currently being used. If not, do the same
1317 processing we do in `subst' for a SET; namely, if the destination
1318 is used only once, try to replace it with a register of the proper
1319 mode and also replace the COMPARE. */
1320 if (undobuf.other_insn == 0
1321 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1322 &undobuf.other_insn))
1323 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1324 i2src, const0_rtx))
1325 != GET_MODE (SET_DEST (newpat))))
1326 {
1327 int regno = REGNO (SET_DEST (newpat));
1328 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1329
1330 if (regno < FIRST_PSEUDO_REGISTER
1331 || (reg_n_sets[regno] == 1 && ! added_sets_2
1332 && ! REG_USERVAR_P (SET_DEST (newpat))))
1333 {
1334 if (regno >= FIRST_PSEUDO_REGISTER)
1335 SUBST (regno_reg_rtx[regno], new_dest);
1336
1337 SUBST (SET_DEST (newpat), new_dest);
1338 SUBST (XEXP (*cc_use, 0), new_dest);
1339 SUBST (SET_SRC (newpat),
1340 gen_rtx_combine (COMPARE, compare_mode,
1341 i2src, const0_rtx));
1342 }
1343 else
1344 undobuf.other_insn = 0;
1345 }
1346 #endif
1347 }
1348 else
1349 #endif
1350 {
1351 n_occurrences = 0; /* `subst' counts here */
1352
1353 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1354 need to make a unique copy of I2SRC each time we substitute it
1355 to avoid self-referential rtl. */
1356
1357 subst_low_cuid = INSN_CUID (i2);
1358 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1359 ! i1_feeds_i3 && i1dest_in_i1src);
1360 previous_num_undos = undobuf.num_undo;
1361
1362 /* Record whether i2's body now appears within i3's body. */
1363 i2_is_used = n_occurrences;
1364 }
1365
1366 /* If we already got a failure, don't try to do more. Otherwise,
1367 try to substitute in I1 if we have it. */
1368
1369 if (i1 && GET_CODE (newpat) != CLOBBER)
1370 {
1371 /* Before we can do this substitution, we must redo the test done
1372 above (see detailed comments there) that ensures that I1DEST
1373 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1374
1375 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1376 0, NULL_PTR))
1377 {
1378 undo_all ();
1379 return 0;
1380 }
1381
1382 n_occurrences = 0;
1383 subst_low_cuid = INSN_CUID (i1);
1384 newpat = subst (newpat, i1dest, i1src, 0, 0);
1385 previous_num_undos = undobuf.num_undo;
1386 }
1387
1388 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1389 to count all the ways that I2SRC and I1SRC can be used. */
1390 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1391 && i2_is_used + added_sets_2 > 1)
1392 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1393 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1394 > 1))
1395 /* Fail if we tried to make a new register (we used to abort, but there's
1396 really no reason to). */
1397 || max_reg_num () != maxreg
1398 /* Fail if we couldn't do something and have a CLOBBER. */
1399 || GET_CODE (newpat) == CLOBBER)
1400 {
1401 undo_all ();
1402 return 0;
1403 }
1404
1405 /* If the actions of the earlier insns must be kept
1406 in addition to substituting them into the latest one,
1407 we must make a new PARALLEL for the latest insn
1408 to hold the additional SETs. */
1409
1410 if (added_sets_1 || added_sets_2)
1411 {
1412 combine_extras++;
1413
1414 if (GET_CODE (newpat) == PARALLEL)
1415 {
1416 rtvec old = XVEC (newpat, 0);
1417 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1418 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1419 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1420 sizeof (old->elem[0]) * old->num_elem);
1421 }
1422 else
1423 {
1424 rtx old = newpat;
1425 total_sets = 1 + added_sets_1 + added_sets_2;
1426 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1427 XVECEXP (newpat, 0, 0) = old;
1428 }
1429
1430 if (added_sets_1)
1431 XVECEXP (newpat, 0, --total_sets)
1432 = (GET_CODE (PATTERN (i1)) == PARALLEL
1433 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1434
1435 if (added_sets_2)
1436 {
1437 /* If there is no I1, use I2's body as is. We used to also not do
1438 the subst call below if I2 was substituted into I3,
1439 but that could lose a simplification. */
1440 if (i1 == 0)
1441 XVECEXP (newpat, 0, --total_sets) = i2pat;
1442 else
1443 /* See comment where i2pat is assigned. */
1444 XVECEXP (newpat, 0, --total_sets)
1445 = subst (i2pat, i1dest, i1src, 0, 0);
1446 }
1447 }
1448
1449 /* We come here when we are replacing a destination in I2 with the
1450 destination of I3. */
1451 validate_replacement:
1452
1453 /* Is the result of combination a valid instruction? */
1454 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1455
1456 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1457 the second SET's destination is a register that is unused. In that case,
1458 we just need the first SET. This can occur when simplifying a divmod
1459 insn. We *must* test for this case here because the code below that
1460 splits two independent SETs doesn't handle this case correctly when it
1461 updates the register status. Also check the case where the first
1462 SET's destination is unused. That would not cause incorrect code, but
1463 does cause an unneeded insn to remain. */
1464
1465 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1466 && XVECLEN (newpat, 0) == 2
1467 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1468 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1469 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1470 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1471 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1472 && asm_noperands (newpat) < 0)
1473 {
1474 newpat = XVECEXP (newpat, 0, 0);
1475 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1476 }
1477
1478 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1479 && XVECLEN (newpat, 0) == 2
1480 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1481 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1482 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1483 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1484 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1485 && asm_noperands (newpat) < 0)
1486 {
1487 newpat = XVECEXP (newpat, 0, 1);
1488 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1489 }
1490
1491 /* See if this is an XOR. If so, perhaps the problem is that the
1492 constant is out of range. Replace it with a complemented XOR with
1493 a complemented constant; it might be in range. */
1494
1495 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1496 && GET_CODE (SET_SRC (newpat)) == XOR
1497 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1498 && ((temp = simplify_unary_operation (NOT,
1499 GET_MODE (SET_SRC (newpat)),
1500 XEXP (SET_SRC (newpat), 1),
1501 GET_MODE (SET_SRC (newpat))))
1502 != 0))
1503 {
1504 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1505 rtx pat
1506 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1507 gen_unary (NOT, i_mode,
1508 gen_binary (XOR, i_mode,
1509 XEXP (SET_SRC (newpat), 0),
1510 temp)));
1511
1512 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1513 if (insn_code_number >= 0)
1514 newpat = pat;
1515 }
1516
1517 /* If we were combining three insns and the result is a simple SET
1518 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1519 insns. There are two ways to do this. It can be split using a
1520 machine-specific method (like when you have an addition of a large
1521 constant) or by combine in the function find_split_point. */
1522
1523 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1524 && asm_noperands (newpat) < 0)
1525 {
1526 rtx m_split, *split;
1527 rtx ni2dest = i2dest;
1528
1529 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1530 use I2DEST as a scratch register will help. In the latter case,
1531 convert I2DEST to the mode of the source of NEWPAT if we can. */
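	 /* Concretely, the second attempt hands the backend
		(parallel [NEWPAT (clobber (reg:M NI2DEST))])
	    so that a machine-specific splitter in need of a scratch
	    register can use NI2DEST for it.  */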
1532
1533 m_split = split_insns (newpat, i3);
1534 if (m_split == 0)
1535 {
1536 /* If I2DEST is a hard register or the only use of a pseudo,
1537 we can change its mode. */
1538 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1539 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1540 && GET_CODE (i2dest) == REG
1541 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1542 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1543 && ! REG_USERVAR_P (i2dest))))
1544 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1545 REGNO (i2dest));
1546
1547 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1548 gen_rtvec (2, newpat,
1549 gen_rtx (CLOBBER,
1550 VOIDmode,
1551 ni2dest))),
1552 i3);
1553 }
1554
1555 if (m_split && GET_CODE (m_split) == SEQUENCE
1556 && XVECLEN (m_split, 0) == 2
1557 && (next_real_insn (i2) == i3
1558 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1559 INSN_CUID (i2))))
1560 {
1561 rtx i2set, i3set;
1562 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1563 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1564
1565 i3set = single_set (XVECEXP (m_split, 0, 1));
1566 i2set = single_set (XVECEXP (m_split, 0, 0));
1567
1568 /* In case we changed the mode of I2DEST, replace it in the
1569 pseudo-register table here. We can't do it above in case this
1570 code doesn't get executed and we do a split the other way. */
1571
1572 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1573 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1574
1575 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1576
1577 /* If I2 or I3 has multiple SETs, we won't know how to track
1578 register status, so don't use these insns. */
1579
1580 if (i2_code_number >= 0 && i2set && i3set)
1581 insn_code_number = recog_for_combine (&newi3pat, i3,
1582 &new_i3_notes);
1583
1584 if (insn_code_number >= 0)
1585 newpat = newi3pat;
1586
1587 /* It is possible that both insns now set the destination of I3.
1588 If so, we must show an extra use of it. */
1589
1590 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1591 && GET_CODE (SET_DEST (i2set)) == REG
1592 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1593 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1594 }
1595
1596 /* If we can split it and use I2DEST, go ahead and see if that
1597 helps things be recognized. Verify that none of the registers
1598 are set between I2 and I3. */
1599 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1600 #ifdef HAVE_cc0
1601 && GET_CODE (i2dest) == REG
1602 #endif
1603 /* We need I2DEST in the proper mode. If it is a hard register
1604 or the only use of a pseudo, we can change its mode. */
1605 && (GET_MODE (*split) == GET_MODE (i2dest)
1606 || GET_MODE (*split) == VOIDmode
1607 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1608 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1609 && ! REG_USERVAR_P (i2dest)))
1610 && (next_real_insn (i2) == i3
1611 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1612 /* We can't overwrite I2DEST if its value is still used by
1613 NEWPAT. */
1614 && ! reg_referenced_p (i2dest, newpat))
1615 {
1616 rtx newdest = i2dest;
1617
1618 /* Get NEWDEST as a register in the proper mode. We have already
1619 validated that we can do this. */
1620 if (GET_MODE (i2dest) != GET_MODE (*split)
1621 && GET_MODE (*split) != VOIDmode)
1622 {
1623 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1624
1625 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1626 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1627 }
1628
1629 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1630 an ASHIFT. This can occur if it was inside a PLUS and hence
1631 appeared to be a memory address. This is a kludge. */
1632 if (GET_CODE (*split) == MULT
1633 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1634 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1635 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
1636 XEXP (*split, 0), GEN_INT (i)));
1637
1638 #ifdef INSN_SCHEDULING
1639 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1640 be written as a ZERO_EXTEND. */
1641 if (GET_CODE (*split) == SUBREG
1642 && GET_CODE (SUBREG_REG (*split)) == MEM)
1643 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1644 XEXP (*split, 0)));
1645 #endif
1646
1647 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1648 SUBST (*split, newdest);
1649 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1650 if (i2_code_number >= 0)
1651 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1652 }
1653 }
1654
1655 /* Check for a case where we loaded from memory in a narrow mode and
1656 then sign extended it, but we need both registers. In that case,
1657 we have a PARALLEL with both loads from the same memory location.
1658 We can split this into a load from memory followed by a register-register
1659 copy. This saves at least one insn, more if register allocation can
1660 eliminate the copy. */
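     /* Schematically (ADDR stands for the common memory address; register
	numbers are illustrative):
	    (parallel [(set (reg:SI 100) (sign_extend:SI (mem:HI ADDR)))
		       (set (reg:HI 101) (mem:HI ADDR))])
	becomes the load
	    (set (reg:SI 100) (sign_extend:SI (mem:HI ADDR)))
	in I2, followed by a copy in I3 whose source is the low part of
	(reg:SI 100).  */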
1661
1662 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1663 && GET_CODE (newpat) == PARALLEL
1664 && XVECLEN (newpat, 0) == 2
1665 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1666 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1667 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1668 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1669 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1670 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1671 INSN_CUID (i2))
1672 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1673 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1674 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1675 SET_SRC (XVECEXP (newpat, 0, 1)))
1676 && ! find_reg_note (i3, REG_UNUSED,
1677 SET_DEST (XVECEXP (newpat, 0, 0))))
1678 {
1679 newi2pat = XVECEXP (newpat, 0, 0);
1680 newpat = XVECEXP (newpat, 0, 1);
1681 SUBST (SET_SRC (newpat),
1682 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)),
1683 SET_DEST (newi2pat)));
1684 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1685 if (i2_code_number >= 0)
1686 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1687
1688 if (insn_code_number >= 0)
1689 {
1690 rtx insn;
1691 rtx link;
1692
1693 /* If we will be able to accept this, we have made a change to the
1695 destination of I3. This can invalidate a LOG_LINKS entry pointing
1695 to I3. No other part of combine.c makes such a transformation.
1696
1697 The new I3 will have a destination that was previously the
1698 destination of I1 or I2 and which was used in I2 or I3. Call
1699 distribute_links to make a LOG_LINK from the next use of
1700 that destination. */
1701
1702 PATTERN (i3) = newpat;
1703 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1704
1705 /* I3 now uses what used to be its destination and which is
1706 now I2's destination. That means we need a LOG_LINK from
1707 I3 to I2. But we used to have one, so we still will.
1708
1709 However, some later insn might be using I2's dest and have
1710 a LOG_LINK pointing at I3. We must remove this link.
1711 The simplest way to remove the link is to point it at I1,
1712 which we know will be a NOTE. */
1713
1714 for (insn = NEXT_INSN (i3);
1715 insn && GET_CODE (insn) != CODE_LABEL
1716 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1717 insn = NEXT_INSN (insn))
1718 {
1719 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1720 && reg_referenced_p (SET_DEST (newi2pat), PATTERN (insn)))
1721 {
1722 for (link = LOG_LINKS (insn); link;
1723 link = XEXP (link, 1))
1724 if (XEXP (link, 0) == i3)
1725 XEXP (link, 0) = i1;
1726
1727 break;
1728 }
1729 }
1730 }
1731 }
1732
1733 /* Similarly, check for a case where we have a PARALLEL of two independent
1734 SETs but we started with three insns. In this case, we can do the sets
1735 as two separate insns. This case occurs when some SET allows two
1736 other insns to combine, but the destination of that SET is still live. */
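     /* Schematically, (parallel [(set X EXPR1) (set Y EXPR2)]) where
	neither SET mentions the other's destination is recognized as the
	separate insns (set Y EXPR2) in I2 and (set X EXPR1) in I3.  */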
1737
1738 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1739 && GET_CODE (newpat) == PARALLEL
1740 && XVECLEN (newpat, 0) == 2
1741 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1742 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1743 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1744 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1745 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1746 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1747 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1748 INSN_CUID (i2))
1749 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1750 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1751 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1752 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1753 XVECEXP (newpat, 0, 0))
1754 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1755 XVECEXP (newpat, 0, 1)))
1756 {
1757 newi2pat = XVECEXP (newpat, 0, 1);
1758 newpat = XVECEXP (newpat, 0, 0);
1759
1760 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1761 if (i2_code_number >= 0)
1762 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1763 }
1764
1765 /* If it still isn't recognized, fail and change things back the way they
1766 were. */
1767 if ((insn_code_number < 0
1768 /* Is the result a reasonable ASM_OPERANDS? */
1769 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1770 {
1771 undo_all ();
1772 return 0;
1773 }
1774
1775 /* If we had to change another insn, make sure it is valid also. */
1776 if (undobuf.other_insn)
1777 {
1778 rtx other_notes = REG_NOTES (undobuf.other_insn);
1779 rtx other_pat = PATTERN (undobuf.other_insn);
1780 rtx new_other_notes;
1781 rtx note, next;
1782
1783 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1784 &new_other_notes);
1785
1786 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1787 {
1788 undo_all ();
1789 return 0;
1790 }
1791
1792 PATTERN (undobuf.other_insn) = other_pat;
1793
1794 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1795 are still valid. Then add any non-duplicate notes added by
1796 recog_for_combine. */
1797 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1798 {
1799 next = XEXP (note, 1);
1800
1801 if (REG_NOTE_KIND (note) == REG_UNUSED
1802 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1803 {
1804 if (GET_CODE (XEXP (note, 0)) == REG)
1805 reg_n_deaths[REGNO (XEXP (note, 0))]--;
1806
1807 remove_note (undobuf.other_insn, note);
1808 }
1809 }
1810
1811 for (note = new_other_notes; note; note = XEXP (note, 1))
1812 if (GET_CODE (XEXP (note, 0)) == REG)
1813 reg_n_deaths[REGNO (XEXP (note, 0))]++;
1814
1815 distribute_notes (new_other_notes, undobuf.other_insn,
1816 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
1817 }
1818
1819 /* We now know that we can do this combination. Merge the insns and
1820 update the status of registers and LOG_LINKS. */
1821
1822 {
1823 rtx i3notes, i2notes, i1notes = 0;
1824 rtx i3links, i2links, i1links = 0;
1825 rtx midnotes = 0;
1826 int all_adjacent = (next_real_insn (i2) == i3
1827 && (i1 == 0 || next_real_insn (i1) == i2));
1828 register int regno;
1829 /* Compute which registers we expect to eliminate. */
1830 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1831 ? 0 : i2dest);
1832 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1833
1834 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1835 clear them. */
1836 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1837 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1838 if (i1)
1839 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1840
1841 /* Ensure that we do not have something that should not be shared but
1842 occurs multiple times in the new insns. Check this by first
1843 resetting all the `used' flags and then copying anything that is shared. */
1844
1845 reset_used_flags (i3notes);
1846 reset_used_flags (i2notes);
1847 reset_used_flags (i1notes);
1848 reset_used_flags (newpat);
1849 reset_used_flags (newi2pat);
1850 if (undobuf.other_insn)
1851 reset_used_flags (PATTERN (undobuf.other_insn));
1852
1853 i3notes = copy_rtx_if_shared (i3notes);
1854 i2notes = copy_rtx_if_shared (i2notes);
1855 i1notes = copy_rtx_if_shared (i1notes);
1856 newpat = copy_rtx_if_shared (newpat);
1857 newi2pat = copy_rtx_if_shared (newi2pat);
1858 if (undobuf.other_insn)
1859 PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
1860
1861 INSN_CODE (i3) = insn_code_number;
1862 PATTERN (i3) = newpat;
1863 if (undobuf.other_insn)
1864 INSN_CODE (undobuf.other_insn) = other_code_number;
1865
1866 /* We had one special case above where I2 had more than one set and
1867 we replaced a destination of one of those sets with the destination
1868 of I3. In that case, we have to update LOG_LINKS of insns later
1869 in this basic block. Note that this (expensive) case is rare. */
1870
1871 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1872 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1873 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1874 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
1875 && ! find_reg_note (i2, REG_UNUSED,
1876 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
1877 {
1878 register rtx insn;
1879
1880 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
1881 {
1882 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1883 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1884 if (XEXP (link, 0) == i2)
1885 XEXP (link, 0) = i3;
1886
1887 if (GET_CODE (insn) == CODE_LABEL
1888 || GET_CODE (insn) == JUMP_INSN)
1889 break;
1890 }
1891 }
1892
1893 LOG_LINKS (i3) = 0;
1894 REG_NOTES (i3) = 0;
1895 LOG_LINKS (i2) = 0;
1896 REG_NOTES (i2) = 0;
1897
1898 if (newi2pat)
1899 {
1900 INSN_CODE (i2) = i2_code_number;
1901 PATTERN (i2) = newi2pat;
1902 }
1903 else
1904 {
1905 PUT_CODE (i2, NOTE);
1906 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
1907 NOTE_SOURCE_FILE (i2) = 0;
1908 }
1909
1910 if (i1)
1911 {
1912 LOG_LINKS (i1) = 0;
1913 REG_NOTES (i1) = 0;
1914 PUT_CODE (i1, NOTE);
1915 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
1916 NOTE_SOURCE_FILE (i1) = 0;
1917 }
1918
1919 /* Get death notes for everything that is now used in either I3 or
1920 I2 and used to die in a previous insn. */
1921
1922 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
1923 if (newi2pat)
1924 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
1925
1926 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
1927 if (i3notes)
1928 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
1929 elim_i2, elim_i1);
1930 if (i2notes)
1931 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
1932 elim_i2, elim_i1);
1933 if (i1notes)
1934 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
1935 elim_i2, elim_i1);
1936 if (midnotes)
1937 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1938 elim_i2, elim_i1);
1939
1940 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
1941 know these are REG_UNUSED and want them to go to the desired insn,
1942 so we always pass it as i3. We have not counted the notes in
1943 reg_n_deaths yet, so we need to do so now. */
1944
1945 if (newi2pat && new_i2_notes)
1946 {
1947 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
1948 if (GET_CODE (XEXP (temp, 0)) == REG)
1949 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
1950
1951 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1952 }
1953
1954 if (new_i3_notes)
1955 {
1956 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
1957 if (GET_CODE (XEXP (temp, 0)) == REG)
1958 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
1959
1960 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
1961 }
1962
1963 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1964 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
1965 Show an additional death due to the REG_DEAD note we make here. If
1966 we discard it in distribute_notes, we will decrement it again. */
1967
1968 if (i3dest_killed)
1969 {
1970 if (GET_CODE (i3dest_killed) == REG)
1971 reg_n_deaths[REGNO (i3dest_killed)]++;
1972
1973 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
1974 NULL_RTX),
1975 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1976 NULL_RTX, NULL_RTX);
1977 }
1978
1979 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
1980 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
1981 we passed I3 in that case, it might delete I2. */
1982
1983 if (i2dest_in_i2src)
1984 {
1985 if (GET_CODE (i2dest) == REG)
1986 reg_n_deaths[REGNO (i2dest)]++;
1987
1988 if (newi2pat && reg_set_p (i2dest, newi2pat))
1989 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1990 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1991 else
1992 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1993 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1994 NULL_RTX, NULL_RTX);
1995 }
1996
1997 if (i1dest_in_i1src)
1998 {
1999 if (GET_CODE (i1dest) == REG)
2000 reg_n_deaths[REGNO (i1dest)]++;
2001
2002 if (newi2pat && reg_set_p (i1dest, newi2pat))
2003 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2004 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2005 else
2006 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2007 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2008 NULL_RTX, NULL_RTX);
2009 }
2010
2011 distribute_links (i3links);
2012 distribute_links (i2links);
2013 distribute_links (i1links);
2014
2015 if (GET_CODE (i2dest) == REG)
2016 {
2017 rtx link;
2018 rtx i2_insn = 0, i2_val = 0, set;
2019
2020 /* The insn that used to set this register doesn't exist, and
2021 this life of the register may not exist either. See if one of
2022 I3's links points to an insn that sets I2DEST. If it does,
2023 that is now the last known value for I2DEST. If we don't update
2024 this and I2 set the register to a value that depended on its old
2025 contents, we will get confused. If this insn is used, things
2026 will be set correctly in combine_instructions. */
2027
2028 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2029 if ((set = single_set (XEXP (link, 0))) != 0
2030 && rtx_equal_p (i2dest, SET_DEST (set)))
2031 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2032
2033 record_value_for_reg (i2dest, i2_insn, i2_val);
2034
2035 /* If the reg formerly set in I2 died only once and that was in I3,
2036 zero its use count so it won't make `reload' do any work. */
2037 if (! added_sets_2 && newi2pat == 0)
2038 {
2039 regno = REGNO (i2dest);
2040 reg_n_sets[regno]--;
2041 if (reg_n_sets[regno] == 0
2042 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2043 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2044 reg_n_refs[regno] = 0;
2045 }
2046 }
2047
2048 if (i1 && GET_CODE (i1dest) == REG)
2049 {
2050 rtx link;
2051 rtx i1_insn = 0, i1_val = 0, set;
2052
2053 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2054 if ((set = single_set (XEXP (link, 0))) != 0
2055 && rtx_equal_p (i1dest, SET_DEST (set)))
2056 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2057
2058 record_value_for_reg (i1dest, i1_insn, i1_val);
2059
2060 regno = REGNO (i1dest);
2061 if (! added_sets_1)
2062 {
2063 reg_n_sets[regno]--;
2064 if (reg_n_sets[regno] == 0
2065 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2066 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2067 reg_n_refs[regno] = 0;
2068 }
2069 }
2070
2071 /* Update reg_significant et al for any changes that may have been made
2072 to this insn. */
2073
2074 note_stores (newpat, set_significant);
2075 if (newi2pat)
2076 note_stores (newi2pat, set_significant);
2077
2078 /* If I3 is now an unconditional jump, ensure that it has a
2079 BARRIER following it since it may have initially been a
2080 conditional jump. */
2081
2082 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2083 && GET_CODE (next_nonnote_insn (i3)) != BARRIER)
2084 emit_barrier_after (i3);
2085 }
2086
2087 combine_successes++;
2088
2089 return newi2pat ? i2 : i3;
2090 }
2091 \f
2092 /* Undo all the modifications recorded in undobuf. */
2093
2094 static void
2095 undo_all ()
2096 {
2097 register int i;
2098 if (undobuf.num_undo > MAX_UNDO)
2099 undobuf.num_undo = MAX_UNDO;
2100 for (i = undobuf.num_undo - 1; i >= 0; i--)
2101 {
2102 if (undobuf.undo[i].is_int)
2103 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2104 else
2105 *undobuf.undo[i].where.rtx = undobuf.undo[i].old_contents.rtx;
2106
2107 }
2108
2109 obfree (undobuf.storage);
2110 undobuf.num_undo = 0;
2111 }
2112 \f
2113 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2114 where we have an arithmetic expression and return that point. LOC will
2115 be inside INSN.
2116
2117 try_combine will call this function to see if an insn can be split into
2118 two insns. */
2119
2120 static rtx *
2121 find_split_point (loc, insn)
2122 rtx *loc;
2123 rtx insn;
2124 {
2125 rtx x = *loc;
2126 enum rtx_code code = GET_CODE (x);
2127 rtx *split;
2128 int len = 0, pos, unsignedp;
2129 rtx inner;
2130
2131 /* First special-case some codes. */
2132 switch (code)
2133 {
2134 case SUBREG:
2135 #ifdef INSN_SCHEDULING
2136 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2137 point. */
2138 if (GET_CODE (SUBREG_REG (x)) == MEM)
2139 return loc;
2140 #endif
2141 return find_split_point (&SUBREG_REG (x), insn);
2142
2143 case MEM:
2144 #ifdef HAVE_lo_sum
2145 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2146 using LO_SUM and HIGH. */
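	 /* E.g. (mem (symbol_ref "x")) is rewritten as
		(mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))
	    and the HIGH subexpression becomes the split point.  */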
2147 if (GET_CODE (XEXP (x, 0)) == CONST
2148 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2149 {
2150 SUBST (XEXP (x, 0),
2151 gen_rtx_combine (LO_SUM, Pmode,
2152 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2153 XEXP (x, 0)));
2154 return &XEXP (XEXP (x, 0), 0);
2155 }
2156 #endif
2157
2158 /* If we have a PLUS whose second operand is a constant and the
2159 address is not valid, perhaps we can split it up using
2160 the machine-specific way to split large constants. We use
2161 the first pseudo-reg (one of the virtual regs) as a placeholder;
2162 it will not remain in the result. */
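	 /* As a hedged sketch (the exact insns are machine-specific), for
	    an invalid (plus (reg 65) (const_int 0x12345678)) the backend
	    might return something like
		(set (reg P) (plus (reg 65) (const_int 0x12340000)))
		(set (reg P) (plus (reg P) (const_int 0x5678)))
	    where P is the placeholder pseudo; the code below then stitches
	    the two sources back together around a split point.  */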
2163 if (GET_CODE (XEXP (x, 0)) == PLUS
2164 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2165 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2166 {
2167 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2168 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2169 subst_insn);
2170
2171 /* This should have produced two insns, each of which sets our
2172 placeholder. If the source of the second is a valid address,
2173 we can put both sources together and make a split point
2174 in the middle. */
2175
2176 if (seq && XVECLEN (seq, 0) == 2
2177 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2178 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2179 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2180 && ! reg_mentioned_p (reg,
2181 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2182 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2183 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2184 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2185 && memory_address_p (GET_MODE (x),
2186 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2187 {
2188 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2189 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2190
2191 /* Replace the placeholder in SRC2 with SRC1. If we can
2192 find where in SRC2 it was placed, that can become our
2193 split point and we can replace this address with SRC2.
2194 Just try two obvious places. */
2195
2196 src2 = replace_rtx (src2, reg, src1);
2197 split = 0;
2198 if (XEXP (src2, 0) == src1)
2199 split = &XEXP (src2, 0);
2200 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2201 && XEXP (XEXP (src2, 0), 0) == src1)
2202 split = &XEXP (XEXP (src2, 0), 0);
2203
2204 if (split)
2205 {
2206 SUBST (XEXP (x, 0), src2);
2207 return split;
2208 }
2209 }
2210
2211 /* If that didn't work, perhaps the first operand is complex and
2212 needs to be computed separately, so make a split point there.
2213 This will occur on machines that just support REG + CONST
2214 and have a constant moved through some previous computation. */
2215
2216 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2217 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2218 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2219 == 'o')))
2220 return &XEXP (XEXP (x, 0), 0);
2221 }
2222 break;
2223
2224 case SET:
2225 #ifdef HAVE_cc0
2226 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2227 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2228 we need to put the operand into a register. So split at that
2229 point. */
2230
2231 if (SET_DEST (x) == cc0_rtx
2232 && GET_CODE (SET_SRC (x)) != COMPARE
2233 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2234 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2235 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2236 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2237 return &SET_SRC (x);
2238 #endif
2239
2240 /* See if we can split SET_SRC as it stands. */
2241 split = find_split_point (&SET_SRC (x), insn);
2242 if (split && split != &SET_SRC (x))
2243 return split;
2244
2245 /* See if this is a bitfield assignment with everything constant. If
2246 so, this is an IOR of an AND, so split it into that. */
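	 /* For example (assuming little-endian bit numbering), storing 3
	    into a 4-bit field at bit 8 of R:
		(set (zero_extract:SI R (const_int 4) (const_int 8))
		     (const_int 3))
	    becomes
		(set R (ior:SI (and:SI R (const_int -3841)) (const_int 768)))
	    since ~(0xf << 8) == 0xfffff0ff == -3841 and 3 << 8 == 768; when
	    the source is all ones the AND is omitted entirely.  */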
2247 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2248 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2249 <= HOST_BITS_PER_WIDE_INT)
2250 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2251 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2252 && GET_CODE (SET_SRC (x)) == CONST_INT
2253 && ((INTVAL (XEXP (SET_DEST (x), 1))
2254 + INTVAL (XEXP (SET_DEST (x), 2)))
2255 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2256 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2257 {
2258 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2259 int len = INTVAL (XEXP (SET_DEST (x), 1));
2260 int src = INTVAL (SET_SRC (x));
2261 rtx dest = XEXP (SET_DEST (x), 0);
2262 enum machine_mode mode = GET_MODE (dest);
2263 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2264
2265 #if BITS_BIG_ENDIAN
2266 pos = GET_MODE_BITSIZE (mode) - len - pos;
2267 #endif
2268
2269 if (src == mask)
2270 SUBST (SET_SRC (x),
2271 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2272 else
2273 SUBST (SET_SRC (x),
2274 gen_binary (IOR, mode,
2275 gen_binary (AND, mode, dest,
2276 GEN_INT (~ (mask << pos)
2277 & GET_MODE_MASK (mode))),
2278 GEN_INT (src << pos)));
2279
2280 SUBST (SET_DEST (x), dest);
2281
2282 split = find_split_point (&SET_SRC (x), insn);
2283 if (split && split != &SET_SRC (x))
2284 return split;
2285 }
2286
2287 /* Otherwise, see if this is an operation that we can split into two.
2288 If so, try to split that. */
2289 code = GET_CODE (SET_SRC (x));
2290
2291 switch (code)
2292 {
2293 case AND:
2294 /* If we are AND'ing with a large constant that is only a single
2295 bit and the result is only being used in a context where we
2296 need to know if it is zero or non-zero, replace it with a bit
2297 extraction. This will avoid the large constant, which might
2298 have taken more than one insn to make. If the constant were
2299 not a valid argument to the AND but took only one insn to make,
2300 this is no worse, but if it took more than one insn, it will
2301 be better. */
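	 /* E.g. (set (reg D) (and:SI (reg S) (const_int 4096))), where
	    (reg D) is only ever tested against zero, becomes a one-bit
	    ZERO_EXTRACT of bit 12, sidestepping the 4096 immediate
	    (register names illustrative).  */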
2302
2303 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2304 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2305 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2306 && GET_CODE (SET_DEST (x)) == REG
2307 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2308 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2309 && XEXP (*split, 0) == SET_DEST (x)
2310 && XEXP (*split, 1) == const0_rtx)
2311 {
2312 SUBST (SET_SRC (x),
2313 make_extraction (GET_MODE (SET_DEST (x)),
2314 XEXP (SET_SRC (x), 0),
2315 pos, NULL_RTX, 1, 1, 0, 0));
2316 return find_split_point (loc, insn);
2317 }
2318 break;
2319
2320 case SIGN_EXTEND:
2321 inner = XEXP (SET_SRC (x), 0);
2322 pos = 0;
2323 len = GET_MODE_BITSIZE (GET_MODE (inner));
2324 unsignedp = 0;
2325 break;
2326
2327 case SIGN_EXTRACT:
2328 case ZERO_EXTRACT:
2329 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2330 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2331 {
2332 inner = XEXP (SET_SRC (x), 0);
2333 len = INTVAL (XEXP (SET_SRC (x), 1));
2334 pos = INTVAL (XEXP (SET_SRC (x), 2));
2335
2336 #if BITS_BIG_ENDIAN
2337 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2338 #endif
2339 unsignedp = (code == ZERO_EXTRACT);
2340 }
2341 break;
2342 }
2343
2344 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2345 {
2346 enum machine_mode mode = GET_MODE (SET_SRC (x));
2347
2348 /* For unsigned, we have a choice of a shift followed by an
2349 AND or two shifts. Use two shifts for field sizes where the
2350 constant might be too large. We assume here that we can
2351 always at least get 8-bit constants in an AND insn, which is
2352 true for every current RISC. */
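	  /* E.g. for a 6-bit field at bit 4 of R in a 32-bit mode, the
	     unsigned form is
		 (and:SI (lshiftrt:SI R (const_int 4)) (const_int 63))
	     while the signed (or wide) form uses two shifts:
		 (ashiftrt:SI (ashift:SI R (const_int 22)) (const_int 26)).  */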
2353
2354 if (unsignedp && len <= 8)
2355 {
2356 SUBST (SET_SRC (x),
2357 gen_rtx_combine
2358 (AND, mode,
2359 gen_rtx_combine (LSHIFTRT, mode,
2360 gen_lowpart_for_combine (mode, inner),
2361 GEN_INT (pos)),
2362 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2363
2364 split = find_split_point (&SET_SRC (x), insn);
2365 if (split && split != &SET_SRC (x))
2366 return split;
2367 }
2368 else
2369 {
2370 SUBST (SET_SRC (x),
2371 gen_rtx_combine
2372 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2373 gen_rtx_combine (ASHIFT, mode,
2374 gen_lowpart_for_combine (mode, inner),
2375 GEN_INT (GET_MODE_BITSIZE (mode)
2376 - len - pos)),
2377 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2378
2379 split = find_split_point (&SET_SRC (x), insn);
2380 if (split && split != &SET_SRC (x))
2381 return split;
2382 }
2383 }
2384
2385 /* See if this is a simple operation with a constant as the second
2386 operand. It might be that this constant is out of range and hence
2387 could be used as a split point. */
2388 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2389 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2390 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2391 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2392 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2393 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2394 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2395 == 'o'))))
2396 return &XEXP (SET_SRC (x), 1);
2397
2398 /* Finally, see if this is a simple operation with its first operand
2399 not in a register. The operation might require this operand in a
2400 register, so return it as a split point. We can always do this
2401 because if the first operand were another operation, we would have
2402 already found it as a split point. */
2403 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2404 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2405 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2406 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2407 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2408 return &XEXP (SET_SRC (x), 0);
2409
2410 return 0;
2411
2412 case AND:
2413 case IOR:
2414 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2415 it is better to write this as (not (ior A B)) so we can split it.
2416 Similarly for IOR. */
2417 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2418 {
2419 SUBST (*loc,
2420 gen_rtx_combine (NOT, GET_MODE (x),
2421 gen_rtx_combine (code == IOR ? AND : IOR,
2422 GET_MODE (x),
2423 XEXP (XEXP (x, 0), 0),
2424 XEXP (XEXP (x, 1), 0))));
2425 return find_split_point (loc, insn);
2426 }
2427
2428 /* Many RISC machines have a large set of logical insns. If the
2429 second operand is a NOT, put it first so we will try to split the
2430 other operand first. */
2431 if (GET_CODE (XEXP (x, 1)) == NOT)
2432 {
2433 rtx tem = XEXP (x, 0);
2434 SUBST (XEXP (x, 0), XEXP (x, 1));
2435 SUBST (XEXP (x, 1), tem);
2436 }
2437 break;
2438 }
2439
2440 /* Otherwise, select our actions depending on our rtx class. */
2441 switch (GET_RTX_CLASS (code))
2442 {
2443 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2444 case '3':
2445 split = find_split_point (&XEXP (x, 2), insn);
2446 if (split)
2447 return split;
2448 /* ... fall through ... */
2449 case '2':
2450 case 'c':
2451 case '<':
2452 split = find_split_point (&XEXP (x, 1), insn);
2453 if (split)
2454 return split;
2455 /* ... fall through ... */
2456 case '1':
2457 /* Some machines have (and (shift ...) ...) insns. If X is not
2458 an AND, but XEXP (X, 0) is, use it as our split point. */
2459 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2460 return &XEXP (x, 0);
2461
2462 split = find_split_point (&XEXP (x, 0), insn);
2463 if (split)
2464 return split;
2465 return loc;
2466 }
2467
2468 /* Otherwise, we don't have a split point. */
2469 return 0;
2470 }
2471 \f
2472 /* Throughout X, replace FROM with TO, and return the result.
2473 The result is TO if X is FROM;
2474 otherwise the result is X, but its contents may have been modified.
2475 If they were modified, a record was made in undobuf so that
2476 undo_all will (among other things) return X to its original state.
2477
2478 If the number of changes necessary is too much to record to undo,
2479 the excess changes are not made, so the result is invalid.
2480 The changes already made can still be undone.
2481 undobuf.num_undo is incremented for such changes, so by testing that,
2482 the caller can tell whether the result is valid.
2483
2484 `n_occurrences' is incremented each time FROM is replaced.
2485
2486 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2487
2488 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2489 by copying if `n_occurrences' is non-zero. */
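/* As an illustrative example (register numbers are made up): with
   FROM = (reg:SI 100) and TO = (plus:SI (reg:SI 101) (const_int 4)),
   X = (mult:SI (reg:SI 100) (reg:SI 102)) is rewritten in place to
   (mult:SI (plus:SI (reg:SI 101) (const_int 4)) (reg:SI 102)), with each
   change recorded in undobuf so that undo_all can revert it.  */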
2490
2491 static rtx
2492 subst (x, from, to, in_dest, unique_copy)
2493 register rtx x, from, to;
2494 int in_dest;
2495 int unique_copy;
2496 {
2497 register char *fmt;
2498 register int len, i;
2499 register enum rtx_code code = GET_CODE (x), orig_code = code;
2500 rtx temp;
2501 enum machine_mode mode = GET_MODE (x);
2502 enum machine_mode op0_mode = VOIDmode;
2503 rtx other_insn;
2504 rtx *cc_use;
2505 int n_restarts = 0;
2506
2507 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2508 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2509 If it is 0, that cannot be done. We can now do this for any MEM
2510 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2511 If not for that, MEM's would very rarely be safe. */
2512
2513 /* Reject MODEs bigger than a word, because we might not be able
2514 to reference a two-register group starting with an arbitrary register
2515 (and currently gen_lowpart might crash for a SUBREG). */
2516
2517 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2518 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2519
2520 /* Two expressions are equal if they are identical copies of a shared
2521 RTX or if they are both registers with the same register number
2522 and mode. */
2523
2524 #define COMBINE_RTX_EQUAL_P(X,Y) \
2525 ((X) == (Y) \
2526 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2527 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2528
2529 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2530 {
2531 n_occurrences++;
2532 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2533 }
2534
2535 /* If X and FROM are the same register but different modes, they will
2536 not have been seen as equal above. However, flow.c will make a
2537 LOG_LINKS entry for that case. If we do nothing, we will try to
2538 rerecognize our original insn and, when it succeeds, we will
2539 delete the feeding insn, which is incorrect.
2540
2541 So force this insn not to match in this (rare) case. */
2542 if (! in_dest && code == REG && GET_CODE (from) == REG
2543 && REGNO (x) == REGNO (from))
2544 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2545
2546 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2547 of which may contain things that can be combined. */
2548 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2549 return x;
2550
2551 /* It is possible to have a subexpression appear twice in the insn.
2552 Suppose that FROM is a register that appears within TO.
2553 Then, after that subexpression has been scanned once by `subst',
2554 the second time it is scanned, TO may be found. If we were
2555 to scan TO here, we would find FROM within it and create a
2556 self-referential rtl structure, which is completely wrong. */
2557 if (COMBINE_RTX_EQUAL_P (x, to))
2558 return to;
2559
2560 len = GET_RTX_LENGTH (code);
2561 fmt = GET_RTX_FORMAT (code);
2562
2563 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2564 set up to skip this common case. All other cases where we want to
2565 suppress replacing something inside a SET_SRC are handled via the
2566 IN_DEST operand. */
2567 if (code == SET
2568 && (GET_CODE (SET_DEST (x)) == REG
2569 || GET_CODE (SET_DEST (x)) == CC0
2570 || GET_CODE (SET_DEST (x)) == PC))
2571 fmt = "ie";
2572
2573 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2574 if (fmt[0] == 'e')
2575 op0_mode = GET_MODE (XEXP (x, 0));
2576
2577 for (i = 0; i < len; i++)
2578 {
2579 if (fmt[i] == 'E')
2580 {
2581 register int j;
2582 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2583 {
2584 register rtx new;
2585 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2586 {
2587 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2588 n_occurrences++;
2589 }
2590 else
2591 {
2592 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2593
2594 /* If this substitution failed, this whole thing fails. */
2595 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2596 return new;
2597 }
2598
2599 SUBST (XVECEXP (x, i, j), new);
2600 }
2601 }
2602 else if (fmt[i] == 'e')
2603 {
2604 register rtx new;
2605
2606 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2607 {
2608 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2609 n_occurrences++;
2610 }
2611 else
2612 /* If we are in a SET_DEST, suppress most cases unless we
2613 have gone inside a MEM, in which case we want to
2614 simplify the address. We assume here that things that
2615 are actually part of the destination have their inner
2616 parts in the first expression. This is true for SUBREG,
2617 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2618 things aside from REG and MEM that should appear in a
2619 SET_DEST. */
2620 new = subst (XEXP (x, i), from, to,
2621 (((in_dest
2622 && (code == SUBREG || code == STRICT_LOW_PART
2623 || code == ZERO_EXTRACT))
2624 || code == SET)
2625 && i == 0), unique_copy);
2626
2627 /* If we found that we will have to reject this combination,
2628 indicate that by returning the CLOBBER ourselves, rather than
2629 an expression containing it. This will speed things up as
2630 well as prevent accidents where two CLOBBERs are considered
2631 to be equal, thus producing an incorrect simplification. */
2632
2633 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2634 return new;
2635
2636 SUBST (XEXP (x, i), new);
2637 }
2638 }
2639
2640 /* We come back to here if we have replaced the expression with one of
2641 a different code and it is likely that further simplification will be
2642 possible. */
2643
2644 restart:
2645
2646 /* If we have restarted more than 4 times, we are probably looping, so
2647 give up. */
2648 if (++n_restarts > 4)
2649 return x;
2650
2651 /* If we are restarting at all, it means that we no longer know the
2652 original mode of operand 0 (since we have probably changed the
2653 form of X). */
2654
2655 if (n_restarts > 1)
2656 op0_mode = VOIDmode;
2657
2658 code = GET_CODE (x);
2659
2660 /* If this is a commutative operation, put a constant last and a complex
2661 expression first. We don't need to do this for comparisons here. */
2662 if (GET_RTX_CLASS (code) == 'c'
2663 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2664 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2665 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2666 || (GET_CODE (XEXP (x, 0)) == SUBREG
2667 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2668 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2669 {
2670 temp = XEXP (x, 0);
2671 SUBST (XEXP (x, 0), XEXP (x, 1));
2672 SUBST (XEXP (x, 1), temp);
2673 }
2674
2675 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2676 sign extension of a PLUS with a constant, reverse the order of the sign
2677 extension and the addition. Note that this is not the same as the original
2678 code, but overflow is undefined for signed values. Also note that the
2679 PLUS will have been partially moved "inside" the sign-extension, so that
2680 the first operand of X will really look like:
2681 (ashiftrt (plus (ashift A C4) C5) C4).
2682 We convert this to
2683 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
2684 and replace the first operand of X with that expression. Later parts
2685 of this function may simplify the expression further.
2686
2687 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2688 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2689 distributive law to produce (plus (mult (sign_extend A) C2) C3).
2690
2691 We do this to simplify address expressions. */
2692
2693 if ((code == PLUS || code == MINUS || code == MULT)
2694 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2695 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2696 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2697 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2698 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2699 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2700 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2701 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2702 XEXP (XEXP (XEXP (x, 0), 0), 1),
2703 XEXP (XEXP (x, 0), 1))) != 0)
2704 {
2705 rtx new
2706 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2707 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2708 INTVAL (XEXP (XEXP (x, 0), 1)));
2709
2710 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2711 INTVAL (XEXP (XEXP (x, 0), 1)));
2712
2713 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2714 }
2715
2716 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2717 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2718 things. Don't deal with operations that change modes here. */
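  /* E.g. (plus:SI (if_then_else COND A B) K) becomes
	 (if_then_else COND (plus:SI A K) (plus:SI B K))
     and each arm is then simplified recursively.  */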
2719
2720 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2721 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2722 {
2723 /* Don't do this by using SUBST inside X since we might be messing
2724 up a shared expression. */
2725 rtx cond = XEXP (XEXP (x, 0), 0);
2726 rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2727 XEXP (x, 1)),
2728 pc_rtx, pc_rtx, 0, 0);
2729 rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2730 XEXP (x, 1)),
2731 pc_rtx, pc_rtx, 0, 0);
2732
2734 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2735 goto restart;
2736 }
2737
2738 else if (GET_RTX_CLASS (code) == '1'
2739 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2740 && GET_MODE (XEXP (x, 0)) == mode)
2741 {
2742 rtx cond = XEXP (XEXP (x, 0), 0);
2743 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
2744 pc_rtx, pc_rtx, 0, 0);
2745 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
2746 pc_rtx, pc_rtx, 0, 0);
2747
2748 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2749 goto restart;
2750 }
2751
2752 /* Try to fold this expression in case we have constants that weren't
2753 present before. */
2754 temp = 0;
2755 switch (GET_RTX_CLASS (code))
2756 {
2757 case '1':
2758 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2759 break;
2760 case '<':
2761 temp = simplify_relational_operation (code, op0_mode,
2762 XEXP (x, 0), XEXP (x, 1));
2763 #ifdef FLOAT_STORE_FLAG_VALUE
2764 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2765 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2766 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2767 #endif
2768 break;
2769 case 'c':
2770 case '2':
2771 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2772 break;
2773 case 'b':
2774 case '3':
2775 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2776 XEXP (x, 1), XEXP (x, 2));
2777 break;
2778 }
2779
2780 if (temp)
2781 x = temp, code = GET_CODE (temp);
2782
2783 /* First see if we can apply the inverse distributive law. */
2784 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2785 {
2786 x = apply_distributive_law (x);
2787 code = GET_CODE (x);
2788 }
2789
2790 /* If CODE is an associative operation not otherwise handled, see if we
2791 can associate some operands. This can win if they are constants or
2792 if they are logically related (i.e. (a & b) & a). */
2793 if ((code == PLUS || code == MINUS
2794 || code == MULT || code == AND || code == IOR || code == XOR
2795 || code == DIV || code == UDIV
2796 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2797 && GET_MODE_CLASS (mode) == MODE_INT)
2798 {
2799 if (GET_CODE (XEXP (x, 0)) == code)
2800 {
2801 rtx other = XEXP (XEXP (x, 0), 0);
2802 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2803 rtx inner_op1 = XEXP (x, 1);
2804 rtx inner;
2805
2806 /* Make sure we pass the constant operand if any as the second
2807 one if this is a commutative operation. */
2808 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2809 {
2810 rtx tem = inner_op0;
2811 inner_op0 = inner_op1;
2812 inner_op1 = tem;
2813 }
2814 inner = simplify_binary_operation (code == MINUS ? PLUS
2815 : code == DIV ? MULT
2816 : code == UDIV ? MULT
2817 : code,
2818 mode, inner_op0, inner_op1);
2819
2820 /* For commutative operations, try the other pair if that one
2821 didn't simplify. */
2822 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2823 {
2824 other = XEXP (XEXP (x, 0), 1);
2825 inner = simplify_binary_operation (code, mode,
2826 XEXP (XEXP (x, 0), 0),
2827 XEXP (x, 1));
2828 }
2829
2830 if (inner)
2831 {
2832 x = gen_binary (code, mode, other, inner);
2833 goto restart;
2834
2836 }
2837 }
2838
2839 /* A little bit of algebraic simplification here. */
2840 switch (code)
2841 {
2842 case MEM:
2843 /* Ensure that our address has any ASHIFTs converted to MULT in case
2844 address-recognizing predicates are called later. */
2845 temp = make_compound_operation (XEXP (x, 0), MEM);
2846 SUBST (XEXP (x, 0), temp);
2847 break;
2848
2849 case SUBREG:
2850 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2851 is paradoxical. If we can't do that safely, then it becomes
2852 something nonsensical so that this combination won't take place. */
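      /* E.g. on a big-endian machine with 4-byte words,
	     (subreg:QI (mem:SI ADDR) 0)
	 becomes (mem:QI (plus ADDR (const_int 3))), addressing the
	 low-order byte.  */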
2853
2854 if (GET_CODE (SUBREG_REG (x)) == MEM
2855 && (GET_MODE_SIZE (mode)
2856 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2857 {
2858 rtx inner = SUBREG_REG (x);
2859 int endian_offset = 0;
2860 /* Don't change the mode of the MEM
2861 if that would change the meaning of the address. */
2862 if (MEM_VOLATILE_P (SUBREG_REG (x))
2863 || mode_dependent_address_p (XEXP (inner, 0)))
2864 return gen_rtx (CLOBBER, mode, const0_rtx);
2865
2866 #if BYTES_BIG_ENDIAN
2867 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2868 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2869 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2870 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2871 #endif
2872 /* Note that if the plus_constant doesn't make a valid address,
2873 this combination won't be accepted. */
2874 x = gen_rtx (MEM, mode,
2875 plus_constant (XEXP (inner, 0),
2876 (SUBREG_WORD (x) * UNITS_PER_WORD
2877 + endian_offset)));
2878 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
2879 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
2880 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
2881 return x;
2882 }
2883
2884 /* If we are in a SET_DEST, these other cases can't apply. */
2885 if (in_dest)
2886 return x;
2887
2888 /* Changing mode twice with SUBREG => just change it once,
2889 or not at all if changing back to starting mode. */
2890 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
2891 {
2892 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
2893 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
2894 return SUBREG_REG (SUBREG_REG (x));
2895
2896 SUBST_INT (SUBREG_WORD (x),
2897 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
2898 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
2899 }
2900
2901 /* SUBREG of a hard register => just change the register number
2902 and/or mode. If the hard register is not valid in that mode,
2903 suppress this combination. If the hard register is the stack,
2904 frame, or argument pointer, leave this as a SUBREG. */
2905
2906 if (GET_CODE (SUBREG_REG (x)) == REG
2907 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
2908 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
2909 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2910 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
2911 #endif
2912 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
2913 {
2914 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
2915 mode))
2916 return gen_rtx (REG, mode,
2917 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2918 else
2919 return gen_rtx (CLOBBER, mode, const0_rtx);
2920 }
2921
2922 /* For a constant, try to pick up the part we want. Handle a full
2923 word and low-order part. Only do this if we are narrowing
2924 the constant; if it is being widened, we have no idea what
2925 the extra bits will have been set to. */
2926
2927 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
2928 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
2929 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
2930 && GET_MODE_CLASS (mode) == MODE_INT)
2931 {
2932 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
2933 0, op0_mode);
2934 if (temp)
2935 return temp;
2936 }
2937
2938 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
2939 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
2940 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
2941
2942 /* If we are narrowing the object, we need to see if we can simplify
2943 the expression for the object knowing that we only need the
2944 low-order bits. */
2945
2946 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2947 && subreg_lowpart_p (x))
2948 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
2949 NULL_RTX);
2950 break;
2951
2952 case NOT:
2953 /* (not (plus X -1)) can become (neg X). */
2954 if (GET_CODE (XEXP (x, 0)) == PLUS
2955 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
2956 {
2957 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
2958 goto restart;
2959 }
2960
2961 /* Similarly, (not (neg X)) is (plus X -1). */
2962 if (GET_CODE (XEXP (x, 0)) == NEG)
2963 {
2964 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
2965 goto restart;
2966 }
2967
2968 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
2969 if (GET_CODE (XEXP (x, 0)) == XOR
2970 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2971 && (temp = simplify_unary_operation (NOT, mode,
2972 XEXP (XEXP (x, 0), 1),
2973 mode)) != 0)
2974 {
2975 SUBST (XEXP (XEXP (x, 0), 1), temp);
2976 return XEXP (x, 0);
2977 }
2978
2979 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
2980 other than 1, but that is not valid. We could do a similar
2981 simplification for (not (lshiftrt C X)) where C is just the sign bit,
2982 but this doesn't seem common enough to bother with. */
2983 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2984 && XEXP (XEXP (x, 0), 0) == const1_rtx)
2985 {
2986 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
2987 XEXP (XEXP (x, 0), 1));
2988 goto restart;
2989 }
2990
2991 if (GET_CODE (XEXP (x, 0)) == SUBREG
2992 && subreg_lowpart_p (XEXP (x, 0))
2993 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
2994 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
2995 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
2996 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
2997 {
2998 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
2999
3000 x = gen_rtx (ROTATE, inner_mode,
3001 gen_unary (NOT, inner_mode, const1_rtx),
3002 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3003 x = gen_lowpart_for_combine (mode, x);
3004 goto restart;
3005 }
3006
3007 #if STORE_FLAG_VALUE == -1
3008 /* (not (comparison foo bar)) can be done by reversing the comparison
3009 code if valid. */
3010 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3011 && reversible_comparison_p (XEXP (x, 0)))
3012 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3013 mode, XEXP (XEXP (x, 0), 0),
3014 XEXP (XEXP (x, 0), 1));
3015 #endif
3016
3017 /* Apply De Morgan's laws to reduce number of patterns for machines
3018 with negating logical insns (and-not, nand, etc.). If result has
3019 only one NOT, put it first, since that is how the patterns are
3020 coded. */
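      /* E.g. (not (ior A (const_int 12))) in QImode becomes
	 (and (not A) (const_int 243)), folding the inner NOT of the
	 constant (243 == 0xff & ~12).  */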
3021
3022 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3023 {
3024 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3025
3026 if (GET_CODE (in1) == NOT)
3027 in1 = XEXP (in1, 0);
3028 else
3029 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3030
3031 if (GET_CODE (in2) == NOT)
3032 in2 = XEXP (in2, 0);
3033 else if (GET_CODE (in2) == CONST_INT
3034 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3035 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3036 else
3037 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3038
3039 if (GET_CODE (in2) == NOT)
3040 {
3041 rtx tem = in2;
3042 in2 = in1; in1 = tem;
3043 }
3044
3045 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3046 mode, in1, in2);
3047 goto restart;
3048 }
3049 break;
3050
3051 case NEG:
3052 /* (neg (plus X 1)) can become (not X). */
3053 if (GET_CODE (XEXP (x, 0)) == PLUS
3054 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3055 {
3056 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3057 goto restart;
3058 }
3059
3060 /* Similarly, (neg (not X)) is (plus X 1). */
3061 if (GET_CODE (XEXP (x, 0)) == NOT)
3062 {
3063 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
3064 goto restart;
3065 }
3066
3067 /* (neg (minus X Y)) can become (minus Y X). */
3068 if (GET_CODE (XEXP (x, 0)) == MINUS
3069 && (GET_MODE_CLASS (mode) != MODE_FLOAT
3070 /* x-y != -(y-x) with IEEE floating point. */
3071 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
3072 {
3073 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3074 XEXP (XEXP (x, 0), 0));
3075 goto restart;
3076 }
3077
3078 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3079 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3080 && significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3081 {
3082 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3083 goto restart;
3084 }
3085
3086 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3087 if we can then eliminate the NEG (e.g.,
3088 if the operand is a constant). */
3089
3090 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3091 {
3092 temp = simplify_unary_operation (NEG, mode,
3093 XEXP (XEXP (x, 0), 0), mode);
3094 if (temp)
3095 {
3096 SUBST (XEXP (XEXP (x, 0), 0), temp);
3097 return XEXP (x, 0);
3098 }
3099 }
3100
3101 temp = expand_compound_operation (XEXP (x, 0));
3102
3103 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3104 replaced by (lshiftrt X C). This will convert
3105 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3106
3107 if (GET_CODE (temp) == ASHIFTRT
3108 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3109 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3110 {
3111 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3112 INTVAL (XEXP (temp, 1)));
3113 goto restart;
3114 }
3115
3116 /* If X has only a single bit significant, say, bit I, convert
3117 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3118 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3119 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3120 or a SUBREG of one since we'd be making the expression more
3121 complex if it was just a register. */
3122
3123 if (GET_CODE (temp) != REG
3124 && ! (GET_CODE (temp) == SUBREG
3125 && GET_CODE (SUBREG_REG (temp)) == REG)
3126 && (i = exact_log2 (significant_bits (temp, mode))) >= 0)
3127 {
3128 rtx temp1 = simplify_shift_const
3129 (NULL_RTX, ASHIFTRT, mode,
3130 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3131 GET_MODE_BITSIZE (mode) - 1 - i),
3132 GET_MODE_BITSIZE (mode) - 1 - i);
3133
3134 /* If all we did was surround TEMP with the two shifts, we
3135 haven't improved anything, so don't use it. Otherwise,
3136 we are better off with TEMP1. */
3137 if (GET_CODE (temp1) != ASHIFTRT
3138 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3139 || XEXP (XEXP (temp1, 0), 0) != temp)
3140 {
3141 x = temp1;
3142 goto restart;
3143 }
3144 }
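/* For instance (assuming 32-bit SImode), if TEMP is known to have only
   bit 3 significant, I == 3 and we form
   (ashiftrt (ashift TEMP (const_int 28)) (const_int 28));
   when TEMP is 8 this yields -8, matching (neg TEMP). */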
3145 break;
3146
3147 case FLOAT_TRUNCATE:
3148 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3149 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3150 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3151 return XEXP (XEXP (x, 0), 0);
3152 break;
3153
3154 #ifdef HAVE_cc0
3155 case COMPARE:
3156 /* Convert (compare FOO (const_int 0)) to FOO. We only do this
3157 when cc0 is in use; otherwise we want to leave it as a COMPARE
3158 so we can distinguish it from a register-register copy. */
3159 if (XEXP (x, 1) == const0_rtx)
3160 return XEXP (x, 0);
3161
3162 /* In IEEE floating point, x-0 is not the same as x. */
3163 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3164 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3165 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3166 return XEXP (x, 0);
3167 break;
3168 #endif
3169
3170 case CONST:
3171 /* (const (const X)) can become (const X). Do it this way rather than
3172 returning the inner CONST since CONST can be shared with a
3173 REG_EQUAL note. */
3174 if (GET_CODE (XEXP (x, 0)) == CONST)
3175 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3176 break;
3177
3178 #ifdef HAVE_lo_sum
3179 case LO_SUM:
3180 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3181 can add in an offset. find_split_point will split this address up
3182 again if it doesn't match. */
3183 if (GET_CODE (XEXP (x, 0)) == HIGH
3184 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3185 return XEXP (x, 1);
3186 break;
3187 #endif
3188
3189 case PLUS:
3190 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3191 outermost. That's because that's the way indexed addresses are
3192 supposed to appear. This code used to check many more cases, but
3193 they are now checked elsewhere. */
3194 if (GET_CODE (XEXP (x, 0)) == PLUS
3195 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3196 return gen_binary (PLUS, mode,
3197 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3198 XEXP (x, 1)),
3199 XEXP (XEXP (x, 0), 1));
3200
3201 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3202 when c is (const_int pow2 / 2) is a sign extension of a
3203 bit-field and can be replaced by either a sign_extend or a
3204 sign_extract. The `and' may be a zero_extend. */
3205 if (GET_CODE (XEXP (x, 0)) == XOR
3206 && GET_CODE (XEXP (x, 1)) == CONST_INT
3207 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3208 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3209 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3210 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3211 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3212 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3213 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3214 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3215 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3216 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3217 == i + 1))))
3218 {
3219 x = simplify_shift_const
3220 (NULL_RTX, ASHIFTRT, mode,
3221 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3222 XEXP (XEXP (XEXP (x, 0), 0), 0),
3223 GET_MODE_BITSIZE (mode) - (i + 1)),
3224 GET_MODE_BITSIZE (mode) - (i + 1));
3225 goto restart;
3226 }
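/* A worked case (assuming 32-bit SImode):
   (plus (xor (and X (const_int 7)) (const_int 4)) (const_int -4))
   has I == 2 and AND mask 7 == (1 << 3) - 1, so it becomes
   (ashiftrt (ashift X (const_int 29)) (const_int 29)), a sign
   extension of the low 3 bits: ((X & 7) ^ 4) - 4 maps 0..3 to
   themselves and 4..7 to -4..-1. */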
3227
3228 /* If only the low-order bit of X is significant, (plus x -1)
3229 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3230 the bitsize of the mode - 1. This allows simplification of
3231 "a = (b & 8) == 0;" */
3232 if (XEXP (x, 1) == constm1_rtx
3233 && GET_CODE (XEXP (x, 0)) != REG
3234 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3235 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3236 && significant_bits (XEXP (x, 0), mode) == 1)
3237 {
3238 x = simplify_shift_const
3239 (NULL_RTX, ASHIFTRT, mode,
3240 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3241 gen_rtx_combine (XOR, mode,
3242 XEXP (x, 0), const1_rtx),
3243 GET_MODE_BITSIZE (mode) - 1),
3244 GET_MODE_BITSIZE (mode) - 1);
3245 goto restart;
3246 }
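/* E.g. (assuming 32-bit SImode), if X is known to be 0 or 1,
   (plus X (const_int -1)) becomes
   (ashiftrt (ashift (xor X (const_int 1)) (const_int 31)) (const_int 31)),
   which is -1 when X is 0 and 0 when X is 1. */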
3247
3248 /* If we are adding two things that have no bits in common, convert
3249 the addition into an IOR. This will often be further simplified,
3250 for example in cases like ((a & 1) + (a & 2)), which can
3251 become a & 3. */
3252
3253 if ((significant_bits (XEXP (x, 0), mode)
3254 & significant_bits (XEXP (x, 1), mode)) == 0)
3255 {
3256 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3257 goto restart;
3258 }
3259 break;
3260
3261 case MINUS:
3262 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3263 (and <foo> (const_int pow2-1)) */
3264 if (GET_CODE (XEXP (x, 1)) == AND
3265 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3266 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3267 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3268 {
3269 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3270 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3271 goto restart;
3272 }
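/* E.g. (minus X (and X (const_int -8))) becomes (and X (const_int 7)):
   subtracting off all but the low three bits leaves exactly those
   bits. */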
3273 break;
3274
3275 case MULT:
3276 /* If we have (mult (plus A B) C), apply the distributive law and then
3277 the inverse distributive law to see if things simplify. This
3278 occurs mostly in addresses, often when unrolling loops. */
3279
3280 if (GET_CODE (XEXP (x, 0)) == PLUS)
3281 {
3282 x = apply_distributive_law
3283 (gen_binary (PLUS, mode,
3284 gen_binary (MULT, mode,
3285 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3286 gen_binary (MULT, mode,
3287 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3288
3289 if (GET_CODE (x) != MULT)
3290 goto restart;
3291 }
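/* E.g. (mult (plus A (const_int 4)) (const_int 8)) is rewritten as
   (plus (mult A (const_int 8)) (const_int 32)), the shape expected
   for indexed address arithmetic. */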
3292
3293 /* If this is multiplication by a power of two and its first operand is
3294 a shift, treat the multiply as a shift to allow the shifts to
3295 possibly combine. */
3296 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3297 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3298 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3299 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3300 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3301 || GET_CODE (XEXP (x, 0)) == ROTATE
3302 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3303 {
3304 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3305 goto restart;
3306 }
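/* E.g. (mult (ashift X (const_int 2)) (const_int 8)) is treated as a
   shift by 3 and should merge into (ashift X (const_int 5)). */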
3307
3308 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3309 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3310 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3311 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3312 XEXP (XEXP (x, 0), 1));
3313 break;
3314
3315 case UDIV:
3316 /* If this is a divide by a power of two, treat it as a shift if
3317 its first operand is a shift. */
3318 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3319 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3320 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3321 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3322 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3323 || GET_CODE (XEXP (x, 0)) == ROTATE
3324 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3325 {
3326 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3327 goto restart;
3328 }
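/* Likewise, (udiv (lshiftrt X (const_int 2)) (const_int 4)) is treated
   as a logical right shift by 2 and should merge into
   (lshiftrt X (const_int 4)). */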
3329 break;
3330
3331 case EQ: case NE:
3332 case GT: case GTU: case GE: case GEU:
3333 case LT: case LTU: case LE: case LEU:
3334 /* If the first operand is a condition code, we can't do anything
3335 with it. */
3336 if (GET_CODE (XEXP (x, 0)) == COMPARE
3337 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3338 #ifdef HAVE_cc0
3339 && XEXP (x, 0) != cc0_rtx
3340 #endif
3341 ))
3342 {
3343 rtx op0 = XEXP (x, 0);
3344 rtx op1 = XEXP (x, 1);
3345 enum rtx_code new_code;
3346
3347 if (GET_CODE (op0) == COMPARE)
3348 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3349
3350 /* Simplify our comparison, if possible. */
3351 new_code = simplify_comparison (code, &op0, &op1);
3352
3353 #if STORE_FLAG_VALUE == 1
3354 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3355 if only the low-order bit is significant in X (such as when
3356 X is a ZERO_EXTRACT of one bit). Similarly, we can convert
3357 EQ to (xor X 1). */
3358 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3359 && op1 == const0_rtx
3360 && significant_bits (op0, GET_MODE (op0)) == 1)
3361 return gen_lowpart_for_combine (mode, op0);
3362 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3363 && op1 == const0_rtx
3364 && significant_bits (op0, GET_MODE (op0)) == 1)
3365 return gen_rtx_combine (XOR, mode,
3366 gen_lowpart_for_combine (mode, op0),
3367 const1_rtx);
3368 #endif
3369
3370 #if STORE_FLAG_VALUE == -1
3371 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3372 to (neg x) if only the low-order bit of X is significant.
3373 This converts (ne (zero_extract X 1 Y) 0) to
3374 (sign_extract X 1 Y). */
3375 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3376 && op1 == const0_rtx
3377 && significant_bits (op0, GET_MODE (op0)) == 1)
3378 {
3379 x = gen_rtx_combine (NEG, mode,
3380 gen_lowpart_for_combine (mode, op0));
3381 goto restart;
3382 }
3383 #endif
3384
3385 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3386 one significant bit, we can convert (ne x 0) to (ashift x c)
3387 where C puts the bit in the sign bit. Remove any AND with
3388 STORE_FLAG_VALUE when we are done, since we are only going to
3389 test the sign bit. */
3390 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3391 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3392 && (STORE_FLAG_VALUE
3393 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3394 && op1 == const0_rtx
3395 && mode == GET_MODE (op0)
3396 && (i = exact_log2 (significant_bits (op0, GET_MODE (op0)))) >= 0)
3397 {
3398 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, op0,
3399 GET_MODE_BITSIZE (mode) - 1 - i);
3400 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3401 return XEXP (x, 0);
3402 else
3403 return x;
3404 }
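/* E.g. with 32-bit SImode and STORE_FLAG_VALUE == 0x80000000, if OP0
   has only bit 3 significant, I == 3 and (ne OP0 0) becomes
   (ashift OP0 (const_int 28)), moving that bit into the sign
   position. */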
3405
3406 /* If the code changed, return a whole new comparison. */
3407 if (new_code != code)
3408 return gen_rtx_combine (new_code, mode, op0, op1);
3409
3410 /* Otherwise, keep this operation, but maybe change its operands.
3411 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3412 SUBST (XEXP (x, 0), op0);
3413 SUBST (XEXP (x, 1), op1);
3414 }
3415 break;
3416
3417 case IF_THEN_ELSE:
3418 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3419 used in it is being compared against certain values. Get the
3420 true and false comparisons and see if that says anything about the
3421 value of each arm. */
3422
3423 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3424 && reversible_comparison_p (XEXP (x, 0))
3425 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3426 {
3427 HOST_WIDE_INT sig;
3428 rtx from = XEXP (XEXP (x, 0), 0);
3429 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3430 enum rtx_code false_code = reverse_condition (true_code);
3431 rtx true_val = XEXP (XEXP (x, 0), 1);
3432 rtx false_val = true_val;
3433 rtx true_arm = XEXP (x, 1);
3434 rtx false_arm = XEXP (x, 2);
3435 int swapped = 0;
3436
3437 /* If FALSE_CODE is EQ, swap the codes and arms. */
3438
3439 if (false_code == EQ)
3440 {
3441 swapped = 1, true_code = EQ, false_code = NE;
3442 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3443 }
3444
3445 /* If we are comparing against zero and the expression being tested
3446 has only a single significant bit, that is its value when it is
3447 not equal to zero. Similarly if it is known to be -1 or 0. */
3448
3449 if (true_code == EQ && true_val == const0_rtx
3450 && exact_log2 (sig = significant_bits (from,
3451 GET_MODE (from))) >= 0)
3452 false_code = EQ, false_val = GEN_INT (sig);
3453 else if (true_code == EQ && true_val == const0_rtx
3454 && (num_sign_bit_copies (from, GET_MODE (from))
3455 == GET_MODE_BITSIZE (GET_MODE (from))))
3456 false_code = EQ, false_val = constm1_rtx;
3457
3458 /* Now simplify an arm if we know the value of the register
3459 in the branch and it is used in the arm. Be careful, since
3460 the RTL may be locally shared. */
3461
3462 if (reg_mentioned_p (from, true_arm))
3463 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3464 from, true_val),
3465 pc_rtx, pc_rtx, 0, 0);
3466 if (reg_mentioned_p (from, false_arm))
3467 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3468 from, false_val),
3469 pc_rtx, pc_rtx, 0, 0);
3470
3471 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3472 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
3473 }
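/* E.g. if FROM is known to be either 0 or 4 (a single significant
   bit), in (if_then_else (eq FROM 0) (plus FROM B) (plus FROM B))
   the arms should simplify to B and (plus (const_int 4) B), since
   FROM must be 4 whenever it is nonzero. */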
3474
3475 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3476 reversed, do so to avoid needing two sets of patterns for
3477 subtract-and-branch insns. Similarly if we have a constant in that
3478 position or if the third operand is the same as the first operand
3479 of the comparison. */
3480
3481 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3482 && reversible_comparison_p (XEXP (x, 0))
3483 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3484 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
3485 {
3486 SUBST (XEXP (x, 0),
3487 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3488 GET_MODE (XEXP (x, 0)),
3489 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3490
3491 temp = XEXP (x, 1);
3492 SUBST (XEXP (x, 1), XEXP (x, 2));
3493 SUBST (XEXP (x, 2), temp);
3494 }
3495
3496 /* If the two arms are identical, we don't need the comparison. */
3497
3498 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3499 && ! side_effects_p (XEXP (x, 0)))
3500 return XEXP (x, 1);
3501
3502 /* Look for cases where we have (abs x) or (neg (abs X)). */
3503
3504 if (GET_MODE_CLASS (mode) == MODE_INT
3505 && GET_CODE (XEXP (x, 2)) == NEG
3506 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3507 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3508 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3509 && ! side_effects_p (XEXP (x, 1)))
3510 switch (GET_CODE (XEXP (x, 0)))
3511 {
3512 case GT:
3513 case GE:
3514 x = gen_unary (ABS, mode, XEXP (x, 1));
3515 goto restart;
3516 case LT:
3517 case LE:
3518 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3519 goto restart;
3520 }
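/* E.g. (if_then_else (ge X (const_int 0)) X (neg X)) becomes (abs X),
   and (if_then_else (lt X (const_int 0)) X (neg X)) becomes
   (neg (abs X)). */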
3521
3522 /* Look for MIN or MAX. */
3523
3524 if (GET_MODE_CLASS (mode) == MODE_INT
3525 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3526 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3527 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3528 && ! side_effects_p (XEXP (x, 0)))
3529 switch (GET_CODE (XEXP (x, 0)))
3530 {
3531 case GE:
3532 case GT:
3533 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3534 goto restart;
3535 case LE:
3536 case LT:
3537 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3538 goto restart;
3539 case GEU:
3540 case GTU:
3541 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3542 goto restart;
3543 case LEU:
3544 case LTU:
3545 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3546 goto restart;
3547 }
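/* E.g. (if_then_else (gt A B) A B) becomes (smax A B); the unsigned
   comparisons map to UMAX and UMIN the same way. */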
3548
3549 /* If we have something like (if_then_else (ne A 0) (OP X C) X),
3550 A is known to be either 0 or 1, and OP is an identity when its
3551 second operand is zero, this can be done as (OP X (mult A C)).
3552 Similarly if A is known to be 0 or -1 and also similarly if we have
3553 a ZERO_EXTEND or SIGN_EXTEND as long as X is already extended (so
3554 we don't destroy it). */
3555
3556 if (mode != VOIDmode
3557 && (GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3558 && XEXP (XEXP (x, 0), 1) == const0_rtx
3559 && (significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3560 || (num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
3561 == GET_MODE_BITSIZE (mode))))
3562 {
3563 rtx nz = make_compound_operation (GET_CODE (XEXP (x, 0)) == NE
3564 ? XEXP (x, 1) : XEXP (x, 2), SET);
3565 rtx z = GET_CODE (XEXP (x, 0)) == NE ? XEXP (x, 2) : XEXP (x, 1);
3566 rtx dir = (significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3567 ? const1_rtx : constm1_rtx);
3568 rtx c = 0;
3569 enum machine_mode m = mode;
3570 enum rtx_code op, extend_op = 0;
3571
3572 if ((GET_CODE (nz) == PLUS || GET_CODE (nz) == MINUS
3573 || GET_CODE (nz) == IOR || GET_CODE (nz) == XOR
3574 || GET_CODE (nz) == ASHIFT
3575 || GET_CODE (nz) == LSHIFTRT || GET_CODE (nz) == ASHIFTRT)
3576 && rtx_equal_p (XEXP (nz, 0), z))
3577 c = XEXP (nz, 1), op = GET_CODE (nz);
3578 else if (GET_CODE (nz) == SIGN_EXTEND
3579 && (GET_CODE (XEXP (nz, 0)) == PLUS
3580 || GET_CODE (XEXP (nz, 0)) == MINUS
3581 || GET_CODE (XEXP (nz, 0)) == IOR
3582 || GET_CODE (XEXP (nz, 0)) == XOR
3583 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3584 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3585 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3586 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3587 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3588 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3589 && (num_sign_bit_copies (z, GET_MODE (z))
3590 >= (GET_MODE_BITSIZE (mode)
3591 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (nz, 0), 0))))))
3592 {
3593 c = XEXP (XEXP (nz, 0), 1);
3594 op = GET_CODE (XEXP (nz, 0));
3595 extend_op = SIGN_EXTEND;
3596 m = GET_MODE (XEXP (nz, 0));
3597 }
3598 else if (GET_CODE (nz) == ZERO_EXTEND
3599 && (GET_CODE (XEXP (nz, 0)) == PLUS
3600 || GET_CODE (XEXP (nz, 0)) == MINUS
3601 || GET_CODE (XEXP (nz, 0)) == IOR
3602 || GET_CODE (XEXP (nz, 0)) == XOR
3603 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3604 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3605 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3606 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3607 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3608 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3609 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3610 && ((significant_bits (z, GET_MODE (z))
3611 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (nz, 0), 0))))
3612 == 0))
3613 {
3614 c = XEXP (XEXP (nz, 0), 1);
3615 op = GET_CODE (XEXP (nz, 0));
3616 extend_op = ZERO_EXTEND;
3617 m = GET_MODE (XEXP (nz, 0));
3618 }
3619
3620 if (c && ! side_effects_p (c) && ! side_effects_p (z))
3621 {
3622 temp
3623 = gen_binary (MULT, m,
3624 gen_lowpart_for_combine (m,
3625 XEXP (XEXP (x, 0), 0)),
3626 gen_binary (MULT, m, c, dir));
3627
3628 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
3629
3630 if (extend_op != 0)
3631 temp = gen_unary (extend_op, mode, temp);
3632
3633 return temp;
3634 }
3635 }
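/* A worked case: if A is known to be 0 or 1,
   (if_then_else (ne A (const_int 0)) (plus X (const_int 5)) X)
   becomes (plus X (mult A (const_int 5))), since the multiply
   contributes 5 exactly when A is 1. */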
3636 break;
3637
3638 case ZERO_EXTRACT:
3639 case SIGN_EXTRACT:
3640 case ZERO_EXTEND:
3641 case SIGN_EXTEND:
3642 /* If we are processing SET_DEST, we are done. */
3643 if (in_dest)
3644 return x;
3645
3646 x = expand_compound_operation (x);
3647 if (GET_CODE (x) != code)
3648 goto restart;
3649 break;
3650
3651 case SET:
3652 /* (set (pc) (return)) gets written as (return). */
3653 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3654 return SET_SRC (x);
3655
3656 /* Convert this into a field assignment operation, if possible. */
3657 x = make_field_assignment (x);
3658
3659 /* If we are setting CC0 or if the source is a COMPARE, look for the
3660 use of the comparison result and try to simplify it unless we already
3661 have used undobuf.other_insn. */
3662 if ((GET_CODE (SET_SRC (x)) == COMPARE
3663 #ifdef HAVE_cc0
3664 || SET_DEST (x) == cc0_rtx
3665 #endif
3666 )
3667 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3668 &other_insn)) != 0
3669 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3670 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3671 && XEXP (*cc_use, 0) == SET_DEST (x))
3672 {
3673 enum rtx_code old_code = GET_CODE (*cc_use);
3674 enum rtx_code new_code;
3675 rtx op0, op1;
3676 int other_changed = 0;
3677 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3678
3679 if (GET_CODE (SET_SRC (x)) == COMPARE)
3680 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3681 else
3682 op0 = SET_SRC (x), op1 = const0_rtx;
3683
3684 /* Simplify our comparison, if possible. */
3685 new_code = simplify_comparison (old_code, &op0, &op1);
3686
3687 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3688 /* If this machine has CC modes other than CCmode, check to see
3689 if we need to use a different CC mode here. */
3690 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3691
3692 /* If the mode changed, we have to change SET_DEST, the mode
3693 in the compare, and the mode in the place SET_DEST is used.
3694 If SET_DEST is a hard register, just build new versions with
3695 the proper mode. If it is a pseudo, we lose unless this is the only
3696 time we set the pseudo, in which case we can safely change
3697 its mode. */
3698 if (compare_mode != GET_MODE (SET_DEST (x)))
3699 {
3700 int regno = REGNO (SET_DEST (x));
3701 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3702
3703 if (regno < FIRST_PSEUDO_REGISTER
3704 || (reg_n_sets[regno] == 1
3705 && ! REG_USERVAR_P (SET_DEST (x))))
3706 {
3707 if (regno >= FIRST_PSEUDO_REGISTER)
3708 SUBST (regno_reg_rtx[regno], new_dest);
3709
3710 SUBST (SET_DEST (x), new_dest);
3711 SUBST (XEXP (*cc_use, 0), new_dest);
3712 other_changed = 1;
3713 }
3714 }
3715 #endif
3716
3717 /* If the code changed, we have to build a new comparison
3718 in undobuf.other_insn. */
3719 if (new_code != old_code)
3720 {
3721 unsigned mask;
3722
3723 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3724 SET_DEST (x), const0_rtx));
3725
3726 /* If the only change we made was to change an EQ into an
3727 NE or vice versa, OP0 has only one significant bit,
3728 and OP1 is zero, check if changing the user of the condition
3729 code will produce a valid insn. If it won't, we can keep
3730 the original code in that insn by surrounding our operation
3731 with an XOR. */
3732
3733 if (((old_code == NE && new_code == EQ)
3734 || (old_code == EQ && new_code == NE))
3735 && ! other_changed && op1 == const0_rtx
3736 && (GET_MODE_BITSIZE (GET_MODE (op0))
3737 <= HOST_BITS_PER_WIDE_INT)
3738 && (exact_log2 (mask = significant_bits (op0,
3739 GET_MODE (op0)))
3740 >= 0))
3741 {
3742 rtx pat = PATTERN (other_insn), note = 0;
3743
3744 if (recog_for_combine (&pat, undobuf.other_insn, &note) < 0
3745 && ! check_asm_operands (pat))
3746 {
3747 PUT_CODE (*cc_use, old_code);
3748 other_insn = 0;
3749
3750 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3751 GEN_INT (mask));
3752 }
3753 }
3754
3755 other_changed = 1;
3756 }
3757
3758 if (other_changed)
3759 undobuf.other_insn = other_insn;
3760
3761 #ifdef HAVE_cc0
3762 /* If we are now comparing against zero, change our source if
3763 needed. If we do not use cc0, we always have a COMPARE. */
3764 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3765 SUBST (SET_SRC (x), op0);
3766 else
3767 #endif
3768
3769 /* Otherwise, if we didn't previously have a COMPARE in the
3770 correct mode, we need one. */
3771 if (GET_CODE (SET_SRC (x)) != COMPARE
3772 || GET_MODE (SET_SRC (x)) != compare_mode)
3773 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3774 op0, op1));
3775 else
3776 {
3777 /* Otherwise, update the COMPARE if needed. */
3778 SUBST (XEXP (SET_SRC (x), 0), op0);
3779 SUBST (XEXP (SET_SRC (x), 1), op1);
3780 }
3781 }
3782 else
3783 {
3784 /* Get SET_SRC in a form where we have placed back any
3785 compound expressions. Then do the checks below. */
3786 temp = make_compound_operation (SET_SRC (x), SET);
3787 SUBST (SET_SRC (x), temp);
3788 }
3789
3790 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3791 operation, and X being a REG or (subreg (reg)), we may be able to
3792 convert this to (set (subreg:m2 x) (op)).
3793
3794 We can always do this if M1 is narrower than M2 because that
3795 means that we only care about the low bits of the result.
3796
3797 However, on most machines (those with BYTE_LOADS_ZERO_EXTEND
3798 and BYTE_LOADS_SIGN_EXTEND not defined), we cannot perform a
3799 narrower operation than requested since the high-order bits will
3800 be undefined. On machines where BYTE_LOADS_*_EXTEND is defined,
3801 however, this transformation is safe as long as M1 and M2 have
3802 the same number of words. */
3803
3804 if (GET_CODE (SET_SRC (x)) == SUBREG
3805 && subreg_lowpart_p (SET_SRC (x))
3806 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3807 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3808 / UNITS_PER_WORD)
3809 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3810 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3811 #if ! defined(BYTE_LOADS_ZERO_EXTEND) && ! defined (BYTE_LOADS_SIGN_EXTEND)
3812 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3813 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3814 #endif
3815 && (GET_CODE (SET_DEST (x)) == REG
3816 || (GET_CODE (SET_DEST (x)) == SUBREG
3817 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3818 {
3819 SUBST (SET_DEST (x),
3820 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
3821 SET_DEST (x)));
3822 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3823 }
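/* E.g. (set X:QI (subreg:QI (plus:SI A B) 0)) can become
   (set (subreg:SI X 0) (plus:SI A B)) when only the low byte of the
   result matters (a sketch; the BYTE_LOADS_*_EXTEND conditions above
   still apply). */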
3824
3825 #ifdef BYTE_LOADS_ZERO_EXTEND
3826 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3827 M wider than N, this would require a paradoxical subreg.
3828 Replace the subreg with a zero_extend to avoid the reload that
3829 would otherwise be required. */
3830 if (GET_CODE (SET_SRC (x)) == SUBREG
3831 && subreg_lowpart_p (SET_SRC (x))
3832 && SUBREG_WORD (SET_SRC (x)) == 0
3833 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3834 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3835 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
3836 SUBST (SET_SRC (x), gen_rtx_combine (ZERO_EXTEND,
3837 GET_MODE (SET_SRC (x)),
3838 XEXP (SET_SRC (x), 0)));
3839 #endif
3840
3841 #ifndef HAVE_conditional_move
3842
3843 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
3844 and we are comparing an item known to be 0 or -1 against 0, use a
3845 logical operation instead. Check for one of the arms being an IOR
3846 of the other arm with some value. We compute three terms to be
3847 IOR'ed together. In practice, at most two will be nonzero. Then
3848 we do the IOR's. */
3849
3850 if (GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
3851 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
3852 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
3853 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
3854 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
3855 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
3856 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
3857 && ! side_effects_p (SET_SRC (x)))
3858 {
3859 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3860 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
3861 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3862 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
3863 rtx term1 = const0_rtx, term2, term3;
3864
3865 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
3866 term1 = false, true = XEXP (true, 1), false = const0_rtx;
3867 else if (GET_CODE (true) == IOR
3868 && rtx_equal_p (XEXP (true, 1), false))
3869 term1 = false, true = XEXP (true, 0), false = const0_rtx;
3870 else if (GET_CODE (false) == IOR
3871 && rtx_equal_p (XEXP (false, 0), true))
3872 term1 = true, false = XEXP (false, 1), true = const0_rtx;
3873 else if (GET_CODE (false) == IOR
3874 && rtx_equal_p (XEXP (false, 1), true))
3875 term1 = true, false = XEXP (false, 0), true = const0_rtx;
3876
3877 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3878 XEXP (XEXP (SET_SRC (x), 0), 0), true);
3879 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3880 gen_unary (NOT, GET_MODE (SET_SRC (x)),
3881 XEXP (XEXP (SET_SRC (x), 0), 0)),
3882 false);
3883
3884 SUBST (SET_SRC (x),
3885 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3886 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3887 term1, term2),
3888 term3));
3889 }
3890 #endif
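/* In effect, with A known to be 0 or -1, (if_then_else (ne A 0) B C)
   is rewritten as (ior (and A B) (and (not A) C)); TERM1 picks up the
   part shared between the arms when one arm is an IOR of the
   other. */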
3891 break;
3892
3893 case AND:
3894 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3895 {
3896 x = simplify_and_const_int (x, mode, XEXP (x, 0),
3897 INTVAL (XEXP (x, 1)));
3898
3899 /* If we have (ior (and (X C1) C2)) and the next restart would be
3900 the last, simplify this by making C1 as small as possible
3901 and then exit. */
3902 if (n_restarts >= 3 && GET_CODE (x) == IOR
3903 && GET_CODE (XEXP (x, 0)) == AND
3904 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3905 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3906 {
3907 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
3908 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
3909 & ~ INTVAL (XEXP (x, 1))));
3910 return gen_binary (IOR, mode, temp, XEXP (x, 1));
3911 }
3912
3913 if (GET_CODE (x) != AND)
3914 goto restart;
3915 }
3916
3917 /* Convert (A | B) & A to A. */
3918 if (GET_CODE (XEXP (x, 0)) == IOR
3919 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3920 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3921 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3922 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3923 return XEXP (x, 1);
3924
3925 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
3926 insn (and may simplify more). */
3927 else if (GET_CODE (XEXP (x, 0)) == XOR
3928 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3929 && ! side_effects_p (XEXP (x, 1)))
3930 {
3931 x = gen_binary (AND, mode,
3932 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3933 XEXP (x, 1));
3934 goto restart;
3935 }
3936 else if (GET_CODE (XEXP (x, 0)) == XOR
3937 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3938 && ! side_effects_p (XEXP (x, 1)))
3939 {
3940 x = gen_binary (AND, mode,
3941 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3942 XEXP (x, 1));
3943 goto restart;
3944 }
3945
3946 /* Similarly for (~ (A ^ B)) & A. */
3947 else if (GET_CODE (XEXP (x, 0)) == NOT
3948 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3949 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
3950 && ! side_effects_p (XEXP (x, 1)))
3951 {
3952 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
3953 XEXP (x, 1));
3954 goto restart;
3955 }
3956 else if (GET_CODE (XEXP (x, 0)) == NOT
3957 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3958 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
3959 && ! side_effects_p (XEXP (x, 1)))
3960 {
3961 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
3962 XEXP (x, 1));
3963 goto restart;
3964 }
3965
3966 /* If we have (and A B) with A not an object but that is known to
3967 be -1 or 0, this is equivalent to the expression
3968 (if_then_else (ne A (const_int 0)) B (const_int 0))
3969 We make this conversion because it may allow further
3970 simplifications and then allow use of conditional move insns.
3971 If the machine doesn't have condition moves, code in case SET
3972 will convert the IF_THEN_ELSE back to the logical operation.
3973 We build the IF_THEN_ELSE here in case further simplification
3974 is possible (e.g., we can convert it to ABS). */
3975
3976 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3977 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3978 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
3979 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3980 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3981 {
3982 rtx op0 = XEXP (x, 0);
3983 rtx op1 = const0_rtx;
3984 enum rtx_code comp_code
3985 = simplify_comparison (NE, &op0, &op1);
3986
3987 x = gen_rtx_combine (IF_THEN_ELSE, mode,
3988 gen_binary (comp_code, VOIDmode, op0, op1),
3989 XEXP (x, 1), const0_rtx);
3990 goto restart;
3991 }
3992
3993 /* In the following group of tests (and those in case IOR below),
3994 we start with some combination of logical operations and apply
3995 the distributive law followed by the inverse distributive law.
3996 Most of the time, this results in no change. However, if some of
3997 the operands are the same or inverses of each other, simplifications
3998 will result.
3999
4000 For example, (and (ior A B) (not B)) can occur as the result of
4001 expanding a bit field assignment. When we apply the distributive
4002 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
4003 which then simplifies to (and (A (not B))). */
4004
4005 /* If we have (and (ior A B) C), apply the distributive law and then
4006 the inverse distributive law to see if things simplify. */
4007
4008 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4009 {
4010 x = apply_distributive_law
4011 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4012 gen_binary (AND, mode,
4013 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4014 gen_binary (AND, mode,
4015 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4016 if (GET_CODE (x) != AND)
4017 goto restart;
4018 }
4019
4020 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4021 {
4022 x = apply_distributive_law
4023 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4024 gen_binary (AND, mode,
4025 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4026 gen_binary (AND, mode,
4027 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4028 if (GET_CODE (x) != AND)
4029 goto restart;
4030 }
4031
4032 /* Similarly, taking advantage of the fact that
4033 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4034
4035 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4036 {
4037 x = apply_distributive_law
4038 (gen_binary (XOR, mode,
4039 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4040 XEXP (XEXP (x, 1), 0)),
4041 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4042 XEXP (XEXP (x, 1), 1))));
4043 if (GET_CODE (x) != AND)
4044 goto restart;
4045 }
4046
4047 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4048 {
4049 x = apply_distributive_law
4050 (gen_binary (XOR, mode,
4051 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4052 XEXP (XEXP (x, 0), 0)),
4053 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4054 XEXP (XEXP (x, 0), 1))));
4055 if (GET_CODE (x) != AND)
4056 goto restart;
4057 }
4058 break;
4059
4060 case IOR:
4061 /* (ior A C) is C if all significant bits of A are on in C. */
4062 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4063 && (significant_bits (XEXP (x, 0), mode)
4064 & ~ INTVAL (XEXP (x, 1))) == 0)
4065 return XEXP (x, 1);
4066
4067 /* Convert (A & B) | A to A. */
4068 if (GET_CODE (XEXP (x, 0)) == AND
4069 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4070 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4071 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4072 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4073 return XEXP (x, 1);
4074
4075 /* If we have (ior (and A B) C), apply the distributive law and then
4076 the inverse distributive law to see if things simplify. */
4077
4078 if (GET_CODE (XEXP (x, 0)) == AND)
4079 {
4080 x = apply_distributive_law
4081 (gen_binary (AND, mode,
4082 gen_binary (IOR, mode,
4083 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4084 gen_binary (IOR, mode,
4085 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4086
4087 if (GET_CODE (x) != IOR)
4088 goto restart;
4089 }
4090
4091 if (GET_CODE (XEXP (x, 1)) == AND)
4092 {
4093 x = apply_distributive_law
4094 (gen_binary (AND, mode,
4095 gen_binary (IOR, mode,
4096 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4097 gen_binary (IOR, mode,
4098 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4099
4100 if (GET_CODE (x) != IOR)
4101 goto restart;
4102 }
4103
4104 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4105 mode size to (rotate A CX). */
4106
4107 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4108 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4109 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4110 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4111 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4112 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4113 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4114 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4115 == GET_MODE_BITSIZE (mode)))
4116 {
4117 rtx shift_count;
4118
4119 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4120 shift_count = XEXP (XEXP (x, 0), 1);
4121 else
4122 shift_count = XEXP (XEXP (x, 1), 1);
4123 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4124 goto restart;
4125 }
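/* E.g. (assuming 32-bit SImode),
   (ior (ashift A (const_int 3)) (lshiftrt A (const_int 29)))
   becomes (rotate A (const_int 3)), since 3 + 29 equals the mode
   bitsize. */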
4126 break;
4127
4128 case XOR:
4129 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4130 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4131 (NOT y). */
4132 {
4133 int num_negated = 0;
4134 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4135
4136 if (GET_CODE (in1) == NOT)
4137 num_negated++, in1 = XEXP (in1, 0);
4138 if (GET_CODE (in2) == NOT)
4139 num_negated++, in2 = XEXP (in2, 0);
4140
4141 if (num_negated == 2)
4142 {
4143 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4144 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4145 }
4146 else if (num_negated == 1)
4147 {
4148 x = gen_unary (NOT, mode,
4149 gen_binary (XOR, mode, in1, in2));
4150 goto restart;
4151 }
4152 }
4153
4154 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4155 correspond to a machine insn or result in further simplifications
4156 if B is a constant. */
4157
4158 if (GET_CODE (XEXP (x, 0)) == AND
4159 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4160 && ! side_effects_p (XEXP (x, 1)))
4161 {
4162 x = gen_binary (AND, mode,
4163 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4164 XEXP (x, 1));
4165 goto restart;
4166 }
4167 else if (GET_CODE (XEXP (x, 0)) == AND
4168 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4169 && ! side_effects_p (XEXP (x, 1)))
4170 {
4171 x = gen_binary (AND, mode,
4172 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4173 XEXP (x, 1));
4174 goto restart;
4175 }
4176
4177
4178 #if STORE_FLAG_VALUE == 1
4179 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4180 comparison. */
4181 if (XEXP (x, 1) == const1_rtx
4182 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4183 && reversible_comparison_p (XEXP (x, 0)))
4184 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4185 mode, XEXP (XEXP (x, 0), 0),
4186 XEXP (XEXP (x, 0), 1));
4187 #endif
4188
4189 /* (xor (comparison foo bar) (const_int sign-bit))
4190 when STORE_FLAG_VALUE is the sign bit. */
4191 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4192 && (STORE_FLAG_VALUE
4193 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4194 && XEXP (x, 1) == const_true_rtx
4195 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4196 && reversible_comparison_p (XEXP (x, 0)))
4197 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4198 mode, XEXP (XEXP (x, 0), 0),
4199 XEXP (XEXP (x, 0), 1));
4200 break;
4201
4202 case ABS:
4203 /* (abs (neg <foo>)) -> (abs <foo>) */
4204 if (GET_CODE (XEXP (x, 0)) == NEG)
4205 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4206
4207 /* If operand is something known to be positive, ignore the ABS. */
4208 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4209 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4210 <= HOST_BITS_PER_WIDE_INT)
4211 && ((significant_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4212 & ((HOST_WIDE_INT) 1
4213 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4214 == 0)))
4215 return XEXP (x, 0);
4216
4217
4218 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4219 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4220 {
4221 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4222 goto restart;
4223 }
4224 break;
4225
4226 case FFS:
4227 /* (ffs (*_extend <X>)) = (ffs <X>) */
4228 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4229 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4230 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4231 break;
4232
4233 case FLOAT:
4234 /* (float (sign_extend <X>)) = (float <X>). */
4235 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4236 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4237 break;
4238
4239 case LSHIFT:
4240 case ASHIFT:
4241 case LSHIFTRT:
4242 case ASHIFTRT:
4243 case ROTATE:
4244 case ROTATERT:
4245 /* If this is a shift by a constant amount, simplify it. */
4246 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4247 {
4248 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4249 INTVAL (XEXP (x, 1)));
4250 if (GET_CODE (x) != code)
4251 goto restart;
4252 }
4253
4254 #ifdef SHIFT_COUNT_TRUNCATED
4255 else if (GET_CODE (XEXP (x, 1)) != REG)
4256 SUBST (XEXP (x, 1),
4257 force_to_mode (XEXP (x, 1), GET_MODE (x),
4258 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
4259 NULL_RTX));
4260 #endif
4261
4262 break;
4263 }
4264
4265 return x;
4266 }
4267 \f
4268 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4269 operations" because they can be replaced with two more basic operations.
4270 ZERO_EXTEND is also considered "compound" because it can be replaced with
4271 an AND operation, which is simpler, though only one operation.
4272
4273 The function expand_compound_operation is called with an rtx expression
4274 and will convert it to the appropriate shifts and AND operations,
4275 simplifying at each stage.
4276
4277 The function make_compound_operation is called to convert an expression
4278 consisting of shifts and ANDs into the equivalent compound expression.
4279 It is the inverse of this function, loosely speaking. */
4280
4281 static rtx
4282 expand_compound_operation (x)
4283 rtx x;
4284 {
4285 int pos = 0, len;
4286 int unsignedp = 0;
4287 int modewidth;
4288 rtx tem;
4289
4290 switch (GET_CODE (x))
4291 {
4292 case ZERO_EXTEND:
4293 unsignedp = 1;
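/* ... fall through; the rest of the handling is shared with
   SIGN_EXTEND. */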
4294 case SIGN_EXTEND:
4295 /* We can't necessarily use a const_int for a multiword mode;
4296 it depends on implicitly extending the value.
4297 Since we don't know the right way to extend it,
4298 we can't tell whether the implicit way is right.
4299
4300 Even for a mode that is no wider than a const_int,
4301 we can't win, because we need to sign extend one of its bits through
4302 the rest of it, and we don't know which bit. */
4303 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4304 return x;
4305
4306 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4307 return x;
4308
4309 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4310 /* If the inner object has VOIDmode (the only way this can happen
4311 is if it is an ASM_OPERANDS), we can't do anything since we don't
4312 know how much masking to do. */
4313 if (len == 0)
4314 return x;
4315
4316 break;
4317
4318 case ZERO_EXTRACT:
4319 unsignedp = 1;
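/* ... fall through; the rest of the handling is shared with
   SIGN_EXTRACT. */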
4320 case SIGN_EXTRACT:
4321 /* If the operand is a CLOBBER, just return it. */
4322 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4323 return XEXP (x, 0);
4324
4325 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4326 || GET_CODE (XEXP (x, 2)) != CONST_INT
4327 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4328 return x;
4329
4330 len = INTVAL (XEXP (x, 1));
4331 pos = INTVAL (XEXP (x, 2));
4332
4333 /* If this goes outside the object being extracted, replace the object
4334 with a (use (mem ...)) construct that only combine understands
4335 and is used only for this purpose. */
4336 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4337 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4338
4339 #if BITS_BIG_ENDIAN
4340 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4341 #endif
4342 break;
4343
4344 default:
4345 return x;
4346 }
4347
4348 /* If we reach here, we want to return a pair of shifts. The inner
4349 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4350 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4351 logical depending on the value of UNSIGNEDP.
4352
4353 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4354 converted into an AND of a shift.
4355
4356 We must check for the case where the left shift would have a negative
4357 count. This can happen in a case like (x >> 31) & 255 on machines
4358 that can't shift by a constant. On those machines, we would first
4359 combine the shift with the AND to produce a variable-position
4360 extraction. Then the constant of 31 would be substituted in to produce
4361 such a position. */
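/* For instance (a sketch, assuming 32-bit SImode),
   (sign_extend:SI (reg:QI R)) expands to
   (ashiftrt:SI (ashift:SI R (const_int 24)) (const_int 24)), while
   the zero_extend form should reduce to (and:SI R (const_int 255)). */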
4362
4363 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4364 if (modewidth >= pos + len)
4365 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4366 GET_MODE (x),
4367 simplify_shift_const (NULL_RTX, ASHIFT,
4368 GET_MODE (x),
4369 XEXP (x, 0),
4370 modewidth - pos - len),
4371 modewidth - len);
4372
4373 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4374 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4375 simplify_shift_const (NULL_RTX, LSHIFTRT,
4376 GET_MODE (x),
4377 XEXP (x, 0), pos),
4378 ((HOST_WIDE_INT) 1 << len) - 1);
4379 else
4380 /* Any other cases we can't handle. */
4381 return x;
4382
4383
4384 /* If we couldn't do this for some reason, return the original
4385 expression. */
4386 if (GET_CODE (tem) == CLOBBER)
4387 return x;
4388
4389 return tem;
4390 }
4391 \f
4392 /* X is a SET which contains an assignment of one object into
4393 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4394 or certain SUBREGS). If possible, convert it into a series of
4395 logical operations.
4396
4397 We half-heartedly support variable positions, but do not at all
4398 support variable lengths. */
4399
4400 static rtx
4401 expand_field_assignment (x)
4402 rtx x;
4403 {
4404 rtx inner;
4405 rtx pos; /* Always counts from low bit. */
4406 int len;
4407 rtx mask;
4408 enum machine_mode compute_mode;
4409
4410 /* Loop until we find something we can't simplify. */
4411 while (1)
4412 {
4413 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4414 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4415 {
4416 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4417 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4418 pos = const0_rtx;
4419 }
4420 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4421 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4422 {
4423 inner = XEXP (SET_DEST (x), 0);
4424 len = INTVAL (XEXP (SET_DEST (x), 1));
4425 pos = XEXP (SET_DEST (x), 2);
4426
4427 /* If the position is constant and the field extends beyond the
4428 width of INNER, surround INNER with a USE to indicate this. */
4429 if (GET_CODE (pos) == CONST_INT
4430 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4431 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4432
4433 #if BITS_BIG_ENDIAN
4434 if (GET_CODE (pos) == CONST_INT)
4435 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4436 - INTVAL (pos));
4437 else if (GET_CODE (pos) == MINUS
4438 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4439 && (INTVAL (XEXP (pos, 1))
4440 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4441 /* If position is ADJUST - X, new position is X. */
4442 pos = XEXP (pos, 0);
4443 else
4444 pos = gen_binary (MINUS, GET_MODE (pos),
4445 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4446 - len),
4447 pos);
4448 #endif
4449 }
4450
4451 /* A SUBREG between two modes that occupy the same numbers of words
4452 can be done by moving the SUBREG to the source. */
4453 else if (GET_CODE (SET_DEST (x)) == SUBREG
4454 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4455 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4456 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4457 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4458 {
4459 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4460 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4461 SET_SRC (x)));
4462 continue;
4463 }
4464 else
4465 break;
4466
4467 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4468 inner = SUBREG_REG (inner);
4469
4470 compute_mode = GET_MODE (inner);
4471
4472 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4473 if (len < HOST_BITS_PER_WIDE_INT)
4474 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4475 else
4476 break;
4477
4478 /* Now compute the equivalent expression. Make a copy of INNER
4479 for the SET_DEST in case it is a MEM into which we will substitute;
4480 we don't want shared RTL in that case. */
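/* In effect: INNER = (INNER & ~(MASK << POS)) | ((SRC & MASK) << POS). */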
4481 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4482 gen_binary (IOR, compute_mode,
4483 gen_binary (AND, compute_mode,
4484 gen_unary (NOT, compute_mode,
4485 gen_binary (ASHIFT,
4486 compute_mode,
4487 mask, pos)),
4488 inner),
4489 gen_binary (ASHIFT, compute_mode,
4490 gen_binary (AND, compute_mode,
4491 gen_lowpart_for_combine
4492 (compute_mode,
4493 SET_SRC (x)),
4494 mask),
4495 pos)));
4496 }
4497
4498 return x;
4499 }
4500 \f
4501 /* Return an RTX for a reference to LEN bits of INNER. POS is the starting
4502 bit position (counted from the LSB) if >= 0; otherwise POS_RTX represents
4503 the starting bit position.
4504
4505 INNER may be a USE. This will occur when we started with a bitfield
4506 that went outside the boundary of the object in memory, which is
4507 allowed on most machines. To isolate this case, we produce a USE
4508 whose mode is wide enough and surround the MEM with it. The only
4509 code that understands the USE is this routine. If it is not removed,
4510 it will cause the resulting insn not to match.
4511
4512 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4513 signed reference.
4514
4515 IN_DEST is non-zero if this is a reference in the destination of a
4516 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4517 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4518 be used.
4519
4520 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4521 ZERO_EXTRACT should be built even for bits starting at bit 0.
4522
4523 MODE is the desired mode of the result (if IN_DEST == 0). */
4524
4525 static rtx
4526 make_extraction (mode, inner, pos, pos_rtx, len,
4527 unsignedp, in_dest, in_compare)
4528 enum machine_mode mode;
4529 rtx inner;
4530 int pos;
4531 rtx pos_rtx;
4532 int len;
4533 int unsignedp;
4534 int in_dest, in_compare;
4535 {
4536 enum machine_mode is_mode = GET_MODE (inner);
4537 enum machine_mode inner_mode;
4538 enum machine_mode wanted_mem_mode = byte_mode;
4539 enum machine_mode pos_mode = word_mode;
4540 enum machine_mode extraction_mode = word_mode;
4541 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4542 int spans_byte = 0;
4543 rtx new = 0;
4544
4545 /* Get some information about INNER and get the innermost object. */
4546 if (GET_CODE (inner) == USE)
4547 /* We don't need to adjust the position because we set up the USE
4548 to pretend that it was a full-word object. */
4549 spans_byte = 1, inner = XEXP (inner, 0);
4550 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4551 inner = SUBREG_REG (inner);
4552
4553 inner_mode = GET_MODE (inner);
4554
4555 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4556 pos = INTVAL (pos_rtx);
4557
4558 /* See if this can be done without an extraction. We never can if the
4559 width of the field is not the same as that of some integer mode. For
4560 registers, we can only avoid the extraction if the position is at the
4561 low-order bit and this is either not in the destination or we have the
4562 appropriate STRICT_LOW_PART operation available.
4563
4564 For MEM, we can avoid an extract if the field starts on an appropriate
4565 boundary and we can change the mode of the memory reference. However,
4566 we cannot directly access the MEM if we have a USE and the underlying
4567 MEM is not TMODE. This combination means that MEM was being used in a
4568 context where bits outside its mode were being referenced; that is only
4569 valid in bit-field insns. */
4570
4571 if (tmode != BLKmode
4572 && ! (spans_byte && inner_mode != tmode)
4573 && ((pos == 0 && GET_CODE (inner) != MEM
4574 && (! in_dest
4575 || (GET_CODE (inner) == REG
4576 && (movstrict_optab->handlers[(int) tmode].insn_code
4577 != CODE_FOR_nothing))))
4578 || (GET_CODE (inner) == MEM && pos >= 0
4579 && (pos
4580 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4581 : BITS_PER_UNIT)) == 0
4582 /* We can't do this if we are widening INNER_MODE (it
4583 may not be aligned, for one thing). */
4584 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4585 && (inner_mode == tmode
4586 || (! mode_dependent_address_p (XEXP (inner, 0))
4587 && ! MEM_VOLATILE_P (inner))))))
4588 {
4589 int offset = pos / BITS_PER_UNIT;
4590
4591 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4592 field. If the original and current mode are the same, we need not
4593 adjust the offset. Otherwise, we do if bytes big endian.
4594
4595 If INNER is not a MEM, get a piece consisting of just the field
4596 of interest (in this case POS must be 0). */
4597
4598 if (GET_CODE (inner) == MEM)
4599 {
4600 #if BYTES_BIG_ENDIAN
4601 if (inner_mode != tmode)
4602 offset = (GET_MODE_SIZE (inner_mode)
4603 - GET_MODE_SIZE (tmode) - offset);
4604 #endif
4605
4606 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4607 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4608 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4609 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4610 }
4611 else if (GET_CODE (inner) == REG)
4612 /* We can't call gen_lowpart_for_combine here since we always want
4613 a SUBREG and it would sometimes return a new hard register. */
4614 new = gen_rtx (SUBREG, tmode, inner,
4615 (WORDS_BIG_ENDIAN
4616 && GET_MODE_SIZE (is_mode) > UNITS_PER_WORD)
4617 ? ((GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (tmode))
4618 / UNITS_PER_WORD)
4619 : 0);
4620 else
4621 new = force_to_mode (inner, tmode, len, NULL_RTX);
4622
4623 /* If this extraction is going into the destination of a SET,
4624 make a STRICT_LOW_PART unless we made a MEM. */
4625
4626 if (in_dest)
4627 return (GET_CODE (new) == MEM ? new
4628 : (GET_CODE (new) != SUBREG
4629 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4630 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
4631
4632 /* Otherwise, sign- or zero-extend unless we already are in the
4633 proper mode. */
4634
4635 return (mode == tmode ? new
4636 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4637 mode, new));
4638 }
4639
4640 /* Unless this is a COMPARE or we have a funny memory reference,
4641 don't do anything with zero-extending field extracts starting at
4642 the low-order bit since they are simple AND operations. */
4643 if (pos == 0 && ! in_dest && ! in_compare && ! spans_byte && unsignedp)
4644 return 0;
4645
4646 /* Get the mode to use should INNER be a MEM, the mode for the position,
4647 and the mode for the result. */
4648 #ifdef HAVE_insv
4649 if (in_dest)
4650 {
4651 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4652 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4653 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4654 }
4655 #endif
4656
4657 #ifdef HAVE_extzv
4658 if (! in_dest && unsignedp)
4659 {
4660 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4661 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4662 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4663 }
4664 #endif
4665
4666 #ifdef HAVE_extv
4667 if (! in_dest && ! unsignedp)
4668 {
4669 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4670 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4671 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4672 }
4673 #endif
4674
4675 /* Never narrow an object, since that might not be safe. */
4676
4677 if (mode != VOIDmode
4678 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4679 extraction_mode = mode;
4680
4681 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4682 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4683 pos_mode = GET_MODE (pos_rtx);
4684
4685 /* If this is not from memory or we have to change the mode of memory and
4686 cannot, the desired mode is EXTRACTION_MODE. */
4687 if (GET_CODE (inner) != MEM
4688 || (inner_mode != wanted_mem_mode
4689 && (mode_dependent_address_p (XEXP (inner, 0))
4690 || MEM_VOLATILE_P (inner))))
4691 wanted_mem_mode = extraction_mode;
4692
4693 #if BITS_BIG_ENDIAN
4694 /* If position is constant, compute new position. Otherwise, build
4695 subtraction. */
4696 if (pos >= 0)
4697 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4698 - len - pos);
4699 else
4700 pos_rtx
4701 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
4702 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4703 GET_MODE_BITSIZE (wanted_mem_mode))
4704 - len),
4705 pos_rtx);
4706 #endif
4707
4708 /* If INNER has a wider mode, make it smaller. If this is a constant
4709 extract, try to adjust the byte to point to the byte containing
4710 the value. */
4711 if (wanted_mem_mode != VOIDmode
4712 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4713 && ((GET_CODE (inner) == MEM
4714 && (inner_mode == wanted_mem_mode
4715 || (! mode_dependent_address_p (XEXP (inner, 0))
4716 && ! MEM_VOLATILE_P (inner))))))
4717 {
4718 int offset = 0;
4719
4720 /* The computations below will be correct if the machine is big
4721 endian in both bits and bytes or little endian in bits and bytes.
4722 If it is mixed, we must adjust. */
4723
4724 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4725 if (! spans_byte && is_mode != wanted_mem_mode)
4726 offset = (GET_MODE_SIZE (is_mode)
4727 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4728 #endif
4729
4730 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4731 adjust OFFSET to compensate. */
4732 #if BYTES_BIG_ENDIAN
4733 if (! spans_byte
4734 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4735 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4736 #endif
4737
4738 /* If this is a constant position, we can move to the desired byte. */
4739 if (pos >= 0)
4740 {
4741 offset += pos / BITS_PER_UNIT;
4742 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4743 }
4744
4745 if (offset != 0 || inner_mode != wanted_mem_mode)
4746 {
4747 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4748 plus_constant (XEXP (inner, 0), offset));
4749 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4750 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4751 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4752 inner = newmem;
4753 }
4754 }
4755
4756 /* If INNER is not memory, we can always get it into the proper mode. */
4757 else if (GET_CODE (inner) != MEM)
4758 inner = force_to_mode (inner, extraction_mode,
4759 (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
4760 : len + pos),
4761 NULL_RTX);
4762
4763 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4764 have to zero extend. Otherwise, we can just use a SUBREG. */
4765 if (pos < 0
4766 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4767 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
4768 else if (pos < 0
4769 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4770 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4771
4772 /* Make POS_RTX unless we already have it and it is correct. */
4773 if (pos_rtx == 0 || (pos >= 0 && INTVAL (pos_rtx) != pos))
4774 pos_rtx = GEN_INT (pos);
4775
4776 /* Make the required operation. See if we can use existing rtx. */
4777 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
4778 extraction_mode, inner, GEN_INT (len), pos_rtx);
4779 if (! in_dest)
4780 new = gen_lowpart_for_combine (mode, new);
4781
4782 return new;
4783 }
4784 \f
4785 /* Look at the expression rooted at X. Look for expressions
4786 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4787 Form these expressions.
4788
4789 Return the new rtx, usually just X.
4790
4791 Also, for machines like the Vax that don't have logical shift insns,
4792 try to convert logical to arithmetic shift operations in cases where
4793 they are equivalent. This undoes the canonicalizations to logical
4794 shifts done elsewhere.
4795
4796 We try, as much as possible, to re-use rtl expressions to save memory.
4797
4798 IN_CODE says what kind of expression we are processing. Normally, it is
4799 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
4800 being kludges), it is MEM. When processing the arguments of a comparison
4801 or a COMPARE against zero, it is COMPARE. */
4802
4803 static rtx
4804 make_compound_operation (x, in_code)
4805 rtx x;
4806 enum rtx_code in_code;
4807 {
4808 enum rtx_code code = GET_CODE (x);
4809 enum machine_mode mode = GET_MODE (x);
4810 int mode_width = GET_MODE_BITSIZE (mode);
4811 enum rtx_code next_code;
4812 int i, count;
4813 rtx new = 0;
4814 char *fmt;
4815
4816 /* Select the code to be used in recursive calls. Once we are inside an
4817 address, we stay there. If we have a comparison, set to COMPARE,
4818 but once inside, go back to our default of SET. */
4819
4820 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
4821 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
4822 && XEXP (x, 1) == const0_rtx) ? COMPARE
4823 : in_code == COMPARE ? SET : in_code);
4824
4825 /* Process depending on the code of this operation. If NEW is set
4826 non-zero, it will be returned. */
4827
4828 switch (code)
4829 {
4830 case ASHIFT:
4831 case LSHIFT:
4832 /* Convert shifts by constants into multiplications if inside
4833 an address. */
4834 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
4835 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4836 && INTVAL (XEXP (x, 1)) >= 0)
4837 new = gen_rtx_combine (MULT, mode, XEXP (x, 0),
4838 GEN_INT ((HOST_WIDE_INT) 1
4839 << INTVAL (XEXP (x, 1))));
4840 break;
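/* For example, inside (mem (plus R (ashift X (const_int 2)))) the
   shift becomes (mult X (const_int 4)), the multiply form that
   address recognizers expect for scaled indexing. */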
4841
4842 case AND:
4843 /* If the second operand is not a constant, we can't do anything
4844 with it. */
4845 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4846 break;
4847
4848 /* If the constant is a power of two minus one and the first operand
4849 is a logical right shift, make an extraction. */
4850 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4851 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4852 new = make_extraction (mode, XEXP (XEXP (x, 0), 0), -1,
4853 XEXP (XEXP (x, 0), 1), i, 1,
4854 0, in_code == COMPARE);
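/* For example, (and (lshiftrt X (const_int 8)) (const_int 255))
   gives i == 8, so it becomes an 8-bit unsigned extraction at bit 8:
   (zero_extract X (const_int 8) (const_int 8)). */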
4855
4856 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4857 else if (GET_CODE (XEXP (x, 0)) == SUBREG
4858 && subreg_lowpart_p (XEXP (x, 0))
4859 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
4860 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4861 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))),
4862 XEXP (SUBREG_REG (XEXP (x, 0)), 0), -1,
4863 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
4864 0, in_code == COMPARE);
4865
4866
4867 /* If we have (and (rotate X C) M) and C is larger than the number
4868 of bits in M, this is an extraction. */
4869
4870 else if (GET_CODE (XEXP (x, 0)) == ROTATE
4871 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4872 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
4873 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
4874 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4875 (GET_MODE_BITSIZE (mode)
4876 - INTVAL (XEXP (XEXP (x, 0), 1))),
4877 NULL_RTX, i, 1, 0, in_code == COMPARE);
4878
4879 /* On machines without logical shifts, if the operand of the AND is
4880 a logical shift and our mask turns off all the propagated sign
4881 bits, we can replace the logical shift with an arithmetic shift. */
4882 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4883 && (lshr_optab->handlers[(int) mode].insn_code
4884 == CODE_FOR_nothing)
4885 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4886 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4887 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
4888 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
4889 && mode_width <= HOST_BITS_PER_WIDE_INT)
4890 {
4891 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
4892
4893 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
4894 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
4895 SUBST (XEXP (x, 0),
4896 gen_rtx_combine (ASHIFTRT, mode, XEXP (XEXP (x, 0), 0),
4897 XEXP (XEXP (x, 0), 1)));
4898 }
4899
4900 /* If the constant is one less than a power of two, this might be
4901 representable by an extraction even if no shift is present.
4902 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
4903 we are in a COMPARE. */
4904 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4905 new = make_extraction (mode, XEXP (x, 0), 0, NULL_RTX, i, 1,
4906 0, in_code == COMPARE);
4907
4908 /* If we are in a comparison and this is an AND with a power of two,
4909 convert this into the appropriate bit extract. */
4910 else if (in_code == COMPARE
4911 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4912 new = make_extraction (mode, XEXP (x, 0), i, NULL_RTX, 1, 1, 0, 1);
4913
4914 break;
4915
4916 case LSHIFTRT:
4917 /* If the sign bit is known to be zero, replace this with an
4918 arithmetic shift. */
4919 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
4920 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4921 && mode_width <= HOST_BITS_PER_WIDE_INT
4922 && (significant_bits (XEXP (x, 0), mode)
4923 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
4924 {
4925 new = gen_rtx_combine (ASHIFTRT, mode, XEXP (x, 0), XEXP (x, 1));
4926 break;
4927 }
4928
4929 /* ... fall through ... */
4930
4931 case ASHIFTRT:
4932 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
4933 this is a SIGN_EXTRACT. */
4934 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4935 && GET_CODE (XEXP (x, 0)) == ASHIFT
4936 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4937 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
4938 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4939 (INTVAL (XEXP (x, 1))
4940 - INTVAL (XEXP (XEXP (x, 0), 1))),
4941 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4942 code == LSHIFTRT, 0, in_code == COMPARE);
4943
4944 /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2). In these
4945 cases, we are better off returning a SIGN_EXTEND of the operation. */
4946
4947 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4948 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
4949 || GET_CODE (XEXP (x, 0)) == XOR
4950 || GET_CODE (XEXP (x, 0)) == PLUS)
4951 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
4952 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4953 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4954 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
4955 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4956 && (INTVAL (XEXP (XEXP (x, 0), 1))
4957 & (((HOST_WIDE_INT) 1
4958 << INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))) - 1)) == 0)
4959 {
4960 HOST_WIDE_INT newop1
4961 = (INTVAL (XEXP (XEXP (x, 0), 1))
4962 >> INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)));
4963
4964 new = make_extraction (mode,
4965 gen_binary (GET_CODE (XEXP (x, 0)), mode,
4966 XEXP (XEXP (XEXP (x, 0), 0), 0),
4967 GEN_INT (newop1)),
4968 (INTVAL (XEXP (x, 1))
4969 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
4970 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4971 code == LSHIFTRT, 0, in_code == COMPARE);
4972 }
4973
4974 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
4975 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4976 && GET_CODE (XEXP (x, 0)) == NEG
4977 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
4978 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4979 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
4980 new = make_extraction (mode,
4981 gen_unary (GET_CODE (XEXP (x, 0)), mode,
4982 XEXP (XEXP (XEXP (x, 0), 0), 0)),
4983 (INTVAL (XEXP (x, 1))
4984 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
4985 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4986 code == LSHIFTRT, 0, in_code == COMPARE);
4987 break;
4988 }
4989
4990 if (new)
4991 {
4992 x = gen_lowpart_for_combine (mode, new);
4993 code = GET_CODE (x);
4994 }
4995
4996 /* Now recursively process each operand of this operation. */
4997 fmt = GET_RTX_FORMAT (code);
4998 for (i = 0; i < GET_RTX_LENGTH (code); i++)
4999 if (fmt[i] == 'e')
5000 {
5001 new = make_compound_operation (XEXP (x, i), next_code);
5002 SUBST (XEXP (x, i), new);
5003 }
5004
5005 return x;
5006 }
5007 \f
5008 /* Given M see if it is a value that would select a field of bits
5009 within an item, but not the entire word. Return -1 if not.
5010 Otherwise, return the starting position of the field, where 0 is the
5011 low-order bit.
5012
5013 *PLEN is set to the length of the field. */
5014
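/* For example, M == 0x78 (binary 0111 1000) gives M & -M == 0x08, so
   POS == 3; then (M >> 3) + 1 == 0x10 is a power of two, so *PLEN == 4.
   A mask with a hole in it, such as 0x5, fails the second test and we
   return -1. */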
5015 static int
5016 get_pos_from_mask (m, plen)
5017 unsigned HOST_WIDE_INT m;
5018 int *plen;
5019 {
5020 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5021 int pos = exact_log2 (m & - m);
5022
5023 if (pos < 0)
5024 return -1;
5025
5026 /* Now shift off the low-order zero bits and see if we have a power of
5027 two minus 1. */
5028 *plen = exact_log2 ((m >> pos) + 1);
5029
5030 if (*plen <= 0)
5031 return -1;
5032
5033 return pos;
5034 }
5035 \f
5036 /* Rewrite X so that it is an expression in MODE. We only care about the
5037 low-order BITS bits so we can ignore AND operations that just clear
5038 higher-order bits.
5039
5040 Also, if REG is non-zero and X is a register equal in value to REG,
5041 replace X with REG. */
5042
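/* For example (assuming the machine has QImode operations), forcing
   (and:SI X (const_int 255)) to QImode with BITS == 8 drops the AND
   entirely: the mask covers all eight bits we care about, so the
   result is just X as a QImode value. */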
5043 static rtx
5044 force_to_mode (x, mode, bits, reg)
5045 rtx x;
5046 enum machine_mode mode;
5047 int bits;
5048 rtx reg;
5049 {
5050 enum rtx_code code = GET_CODE (x);
5051 enum machine_mode op_mode = mode;
5052
5053 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
5054 just get X in the proper mode. */
5055
5056 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5057 || bits > GET_MODE_BITSIZE (mode))
5058 return gen_lowpart_for_combine (mode, x);
5059
5060 switch (code)
5061 {
5062 case SIGN_EXTEND:
5063 case ZERO_EXTEND:
5064 case ZERO_EXTRACT:
5065 case SIGN_EXTRACT:
5066 x = expand_compound_operation (x);
5067 if (GET_CODE (x) != code)
5068 return force_to_mode (x, mode, bits, reg);
5069 break;
5070
5071 case REG:
5072 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5073 || rtx_equal_p (reg, get_last_value (x))))
5074 x = reg;
5075 break;
5076
5077 case CONST_INT:
5078 if (bits < HOST_BITS_PER_WIDE_INT)
5079 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
5080 return x;
5081
5082 case SUBREG:
5083 /* Ignore low-order SUBREGs. */
5084 if (subreg_lowpart_p (x))
5085 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
5086 break;
5087
5088 case AND:
5089 /* If this is an AND with a constant, handle it specially here.
5090 Otherwise, we fall through to do the general binary case. */
5091
5092 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5093 {
5094 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
5095 int len = exact_log2 (mask + 1);
5096 rtx op = XEXP (x, 0);
5097
5098 /* If this is masking some low-order bits, we may be able to
5099 impose a stricter constraint on what bits of the operand are
5100 required. */
5101
5102 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
5103 reg);
5104
5105 if (bits < HOST_BITS_PER_WIDE_INT)
5106 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
5107
5108 /* If we have no AND in MODE, use the original mode for the
5109 operation. */
5110
5111 if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5112 op_mode = GET_MODE (x);
5113
5114 x = simplify_and_const_int (x, op_mode, op, mask);
5115
5116 /* If X is still an AND, see if it is an AND with a mask that
5117 is just some low-order bits. If so, and it is BITS wide (it
5118 can't be wider), we don't need it. */
5119
5120 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5121 && bits < HOST_BITS_PER_WIDE_INT
5122 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
5123 x = XEXP (x, 0);
5124
5125 break;
5126 }
5127
5128 /* ... fall through ... */
5129
5130 case PLUS:
5131 case MINUS:
5132 case MULT:
5133 case IOR:
5134 case XOR:
5135 /* For most binary operations, just propagate into the operation and
5136 change the mode if we have an operation of that mode. */
5137
5138 if ((code == PLUS
5139 && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5140 || (code == MINUS
5141 && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5142 || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
5143 == CODE_FOR_nothing))
5144 || (code == AND
5145 && and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5146 || (code == IOR
5147 && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5148 || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
5149 == CODE_FOR_nothing)))
5150 op_mode = GET_MODE (x);
5151
5152 x = gen_binary (code, op_mode,
5153 gen_lowpart_for_combine (op_mode,
5154 force_to_mode (XEXP (x, 0),
5155 mode, bits,
5156 reg)),
5157 gen_lowpart_for_combine (op_mode,
5158 force_to_mode (XEXP (x, 1),
5159 mode, bits,
5160 reg)));
5161 break;
5162
5163 case ASHIFT:
5164 case LSHIFT:
5165 /* For left shifts, do the same, but just for the first operand.
5166 If the shift count is a constant, we need even fewer bits of the
5167 first operand. */
5168
5169 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
5170 bits -= INTVAL (XEXP (x, 1));
5171
5172 if ((code == ASHIFT
5173 && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5174 || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
5175 == CODE_FOR_nothing)))
5176 op_mode = GET_MODE (x);
5177
5178 x = gen_binary (code, op_mode,
5179 gen_lowpart_for_combine (op_mode,
5180 force_to_mode (XEXP (x, 0),
5181 mode, bits,
5182 reg)),
5183 XEXP (x, 1));
5184 break;
5185
5186 case LSHIFTRT:
5187 /* Here we can only do something if the shift count is a constant.
5188 If it is, and the count plus BITS is no larger than the width of
5189 MODE, we can do the shift in MODE. */
5190
5191 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5192 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
5193 {
5194 rtx inner = force_to_mode (XEXP (x, 0), mode,
5195 bits + INTVAL (XEXP (x, 1)), reg);
5196
5197 if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5198 op_mode = GET_MODE (x);
5199
5200 x = gen_binary (LSHIFTRT, op_mode,
5201 gen_lowpart_for_combine (op_mode, inner),
5202 XEXP (x, 1));
5203 }
5204 break;
5205
5206 case ASHIFTRT:
5207 /* If this is a sign-extension operation that just affects bits
5208 we don't care about, remove it. */
5209
5210 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5211 && INTVAL (XEXP (x, 1)) >= 0
5212 && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
5213 && GET_CODE (XEXP (x, 0)) == ASHIFT
5214 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5215 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5216 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
5217 break;
5218
5219 case NEG:
5220 case NOT:
5221 if ((code == NEG
5222 && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5223 || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
5224 == CODE_FOR_nothing)))
5225 op_mode = GET_MODE (x);
5226
5227 /* Handle these similarly to the way we handle most binary operations. */
5228 x = gen_unary (code, op_mode,
5229 gen_lowpart_for_combine (op_mode,
5230 force_to_mode (XEXP (x, 0), mode,
5231 bits, reg)));
5232 break;
5233
5234 case IF_THEN_ELSE:
5235 /* We have no way of knowing if the IF_THEN_ELSE can itself be
5236 written in a narrower mode. We play it safe and do not do so. */
5237
5238 SUBST (XEXP (x, 1),
5239 gen_lowpart_for_combine (GET_MODE (x),
5240 force_to_mode (XEXP (x, 1), mode,
5241 bits, reg)));
5242 SUBST (XEXP (x, 2),
5243 gen_lowpart_for_combine (GET_MODE (x),
5244 force_to_mode (XEXP (x, 2), mode,
5245 bits, reg)));
5246 break;
5247 }
5248
5249 /* Ensure we return a value of the proper mode. */
5250 return gen_lowpart_for_combine (mode, x);
5251 }
5252 \f
5253 /* Return the value of expression X given the fact that condition COND
5254 is known to be true when applied to REG as its first operand and VAL
5255 as its second. X is known to not be shared and so can be modified in
5256 place.
5257
5258 We only handle the simplest cases, and specifically those cases that
5259 arise with IF_THEN_ELSE expressions. */
5260
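/* For example, if COND is GE, REG is R, and VAL is (const_int 0),
   then (abs R) simplifies to R, and (smax R (const_int 0)) also
   simplifies to R, since R is known to be nonnegative. */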
5261 static rtx
5262 known_cond (x, cond, reg, val)
5263 rtx x;
5264 enum rtx_code cond;
5265 rtx reg, val;
5266 {
5267 enum rtx_code code = GET_CODE (x);
5268 rtx new, temp;
5269 char *fmt;
5270 int i, j;
5271
5272 if (side_effects_p (x))
5273 return x;
5274
5275 if (cond == EQ && rtx_equal_p (x, reg))
5276 return val;
5277
5278 /* If X is (abs REG) and we know something about REG's relationship
5279 with zero, we may be able to simplify this. */
5280
5281 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
5282 switch (cond)
5283 {
5284 case GE: case GT: case EQ:
5285 return XEXP (x, 0);
5286 case LT: case LE:
5287 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
5288 }
5289
5290 /* The only other cases we handle are MIN, MAX, and comparisons if the
5291 operands are the same as REG and VAL. */
5292
5293 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
5294 {
5295 if (rtx_equal_p (XEXP (x, 0), val))
5296 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
5297
5298 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
5299 {
5300 if (GET_RTX_CLASS (code) == '<')
5301 return (comparison_dominates_p (cond, code) ? const_true_rtx
5302 : (comparison_dominates_p (cond,
5303 reverse_condition (code))
5304 ? const0_rtx : x));
5305
5306 else if (code == SMAX || code == SMIN
5307 || code == UMIN || code == UMAX)
5308 {
5309 int unsignedp = (code == UMIN || code == UMAX);
5310
5311 if (code == SMAX || code == UMAX)
5312 cond = reverse_condition (cond);
5313
5314 switch (cond)
5315 {
5316 case GE: case GT:
5317 return unsignedp ? x : XEXP (x, 1);
5318 case LE: case LT:
5319 return unsignedp ? x : XEXP (x, 0);
5320 case GEU: case GTU:
5321 return unsignedp ? XEXP (x, 1) : x;
5322 case LEU: case LTU:
5323 return unsignedp ? XEXP (x, 0) : x;
5324 }
5325 }
5326 }
5327 }
5328
5329 fmt = GET_RTX_FORMAT (code);
5330 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5331 {
5332 if (fmt[i] == 'e')
5333 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
5334 else if (fmt[i] == 'E')
5335 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5336 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
5337 cond, reg, val));
5338 }
5339
5340 return x;
5341 }
5342 \f
5343 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
5344 Return that assignment if so.
5345
5346 We only handle the most common cases. */
5347
5348 static rtx
5349 make_field_assignment (x)
5350 rtx x;
5351 {
5352 rtx dest = SET_DEST (x);
5353 rtx src = SET_SRC (x);
5354 rtx ourdest;
5355 rtx assign;
5356 HOST_WIDE_INT c1;
5357 int pos, len;
5358 rtx other;
5359 enum machine_mode mode;
5360
5361 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
5362 a clear of a one-bit field. We will have changed it to
5363 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
5364 for a SUBREG. */
5365
5366 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
5367 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
5368 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
5369 && (rtx_equal_p (dest, XEXP (src, 1))
5370 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5371 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5372 {
5373 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
5374 1, 1, 1, 0);
5375 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5376 }
5377
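/* For example, with POS == 5, (and (rotate (const_int -2) (const_int 5))
   DEST) masks off only bit 5 of DEST, so the SET becomes
   (set (zero_extract DEST (const_int 1) (const_int 5)) (const_int 0)). */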
5378 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
5379 && subreg_lowpart_p (XEXP (src, 0))
5380 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
5381 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
5382 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
5383 && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
5384 && (rtx_equal_p (dest, XEXP (src, 1))
5385 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5386 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5387 {
5388 assign = make_extraction (VOIDmode, dest, -1,
5389 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
5390 1, 1, 1, 0);
5391 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5392 }
5393
5394 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
5395 one-bit field. */
5396 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
5397 && XEXP (XEXP (src, 0), 0) == const1_rtx
5398 && (rtx_equal_p (dest, XEXP (src, 1))
5399 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5400 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5401 {
5402 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
5403 1, 1, 1, 0);
5404 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
5405 }
5406
5407 /* The other case we handle is assignments into a constant-position
5408 field. They look like (ior (and DEST C1) OTHER). If C1 represents
5409 a mask that has all one bits except for a group of zero bits and
5410 OTHER is known to have zeros where C1 has ones, this is such an
5411 assignment. Compute the position and length from C1. Shift OTHER
5412 to the appropriate position, force it to the required mode, and
5413 make the extraction. Check for the AND in both operands. */
5414
5415 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
5416 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
5417 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
5418 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
5419 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
5420 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
5421 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
5422 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
5423 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
5424 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
5425 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
5426 dest)))
5427 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
5428 else
5429 return x;
5430
5431 pos = get_pos_from_mask (~c1, &len);
5432 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
5433 || (c1 & significant_bits (other, GET_MODE (other))) != 0)
5434 return x;
5435
5436 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
5437
5438 /* The mode to use for the source is the mode of the assignment, or of
5439 what is inside a possible STRICT_LOW_PART. */
5440 mode = (GET_CODE (assign) == STRICT_LOW_PART
5441 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
5442
5443 /* Shift OTHER right POS places and make it the source, restricting it
5444 to the proper length and mode. */
5445
5446 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
5447 GET_MODE (src), other, pos),
5448 mode, len, dest);
5449
5450 return gen_rtx_combine (SET, VOIDmode, assign, src);
5451 }
5452 \f
5453 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5454 if so. */
5455
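/* For example, (ior (and A C) (and B C)) becomes (and (ior A B) C):
   the common operand C is factored out, and the new inner (ior A B)
   may itself simplify further. */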
5456 static rtx
5457 apply_distributive_law (x)
5458 rtx x;
5459 {
5460 enum rtx_code code = GET_CODE (x);
5461 rtx lhs, rhs, other;
5462 rtx tem;
5463 enum rtx_code inner_code;
5464
5465 /* The outer operation can only be one of the following: */
5466 if (code != IOR && code != AND && code != XOR
5467 && code != PLUS && code != MINUS)
5468 return x;
5469
5470 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
5471
5472 /* If either operand is a primitive we can't do anything, so get out fast. */
5473 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
5474 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
5475 return x;
5476
5477 lhs = expand_compound_operation (lhs);
5478 rhs = expand_compound_operation (rhs);
5479 inner_code = GET_CODE (lhs);
5480 if (inner_code != GET_CODE (rhs))
5481 return x;
5482
5483 /* See if the inner and outer operations distribute. */
5484 switch (inner_code)
5485 {
5486 case LSHIFTRT:
5487 case ASHIFTRT:
5488 case AND:
5489 case IOR:
5490 /* These all distribute except over PLUS. */
5491 if (code == PLUS || code == MINUS)
5492 return x;
5493 break;
5494
5495 case MULT:
5496 if (code != PLUS && code != MINUS)
5497 return x;
5498 break;
5499
5500 case ASHIFT:
5501 case LSHIFT:
5502 /* These are also multiplies, so they distribute over everything. */
5503 break;
5504
5505 case SUBREG:
5506 /* Non-paradoxical SUBREGs distribute over all operations, provided
5507 the inner modes and word numbers are the same, this is an extraction
5508 of a low-order part, we don't convert an fp operation to int or
5509 vice versa, and we would not be converting a single-word
5510 operation into a multi-word operation. The latter test is not
5511 required, but it prevents generating unneeded multi-word operations.
5512 Some of the previous tests are redundant given the latter test, but
5513 are retained because they are required for correctness.
5514
5515 We produce the result slightly differently in this case. */
5516
5517 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
5518 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
5519 || ! subreg_lowpart_p (lhs)
5520 || (GET_MODE_CLASS (GET_MODE (lhs))
5521 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
5522 || (GET_MODE_SIZE (GET_MODE (lhs))
5523 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
5524 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
5525 return x;
5526
5527 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
5528 SUBREG_REG (lhs), SUBREG_REG (rhs));
5529 return gen_lowpart_for_combine (GET_MODE (x), tem);
5530
5531 default:
5532 return x;
5533 }
5534
5535 /* Set LHS and RHS to the inner operands (A and B in the example
5536 above) and set OTHER to the common operand (C in the example).
5537 There is only one way to do this unless the inner operation is
5538 commutative. */
5539 if (GET_RTX_CLASS (inner_code) == 'c'
5540 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
5541 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
5542 else if (GET_RTX_CLASS (inner_code) == 'c'
5543 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
5544 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
5545 else if (GET_RTX_CLASS (inner_code) == 'c'
5546 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
5547 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
5548 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
5549 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
5550 else
5551 return x;
5552
5553 /* Form the new inner operation, seeing if it simplifies first. */
5554 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
5555
5556 /* There is one exception to the general way of distributing:
5557 (a | b) ^ (a | c) -> (~a) & (b ^ c) */
5558 if (code == XOR && inner_code == IOR)
5559 {
5560 inner_code = AND;
5561 other = gen_unary (NOT, GET_MODE (x), other);
5562 }
5563
5564 /* We may be able to continue distributing the result, so call
5565 ourselves recursively on the inner operation before forming the
5566 outer operation, which we return. */
5567 return gen_binary (inner_code, GET_MODE (x),
5568 apply_distributive_law (tem), other);
5569 }
5570 \f
5571 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5572 in MODE.
5573
5574 Return an equivalent form, if different from X. Otherwise, return X. If
5575 X is zero, we are to always construct the equivalent form. */
5576
5577 static rtx
5578 simplify_and_const_int (x, mode, varop, constop)
5579 rtx x;
5580 enum machine_mode mode;
5581 rtx varop;
5582 unsigned HOST_WIDE_INT constop;
5583 {
5584 register enum machine_mode tmode;
5585 register rtx temp;
5586 unsigned HOST_WIDE_INT significant;
5587
5588 /* There is a large class of optimizations based on the principle that
5589 some operations produce results where certain bits are known to be zero,
5590 and hence are not significant to the AND. For example, if we have just
5591 done a left shift of one bit, the low-order bit is known to be zero and
5592 hence an AND with a mask of ~1 would not do anything.
5593
5594 At the end of the following loop, we set:
5595
5596 VAROP to be the item to be AND'ed with;
5597 CONSTOP to the constant value to AND it with. */
5598
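/* For example, ANDing (ashift:QI X (const_int 1)) with 0xfe changes
   nothing: the shift already clears the low-order bit, so CONSTOP
   ends up equal to the significant bits and the AND is dropped by
   the code following this loop. */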
5599 while (1)
5600 {
5601 /* If we ever encounter a mode wider than the host machine's widest
5602 integer size, we can't compute the masks accurately, so give up. */
5603 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
5604 break;
5605
5606 /* Unless one of the cases below does a `continue',
5607 a `break' will be executed to exit the loop. */
5608
5609 switch (GET_CODE (varop))
5610 {
5611 case CLOBBER:
5612 /* If VAROP is a (clobber (const_int)), return it since we know
5613 we are generating something that won't match. */
5614 return varop;
5615
5616 #if ! BITS_BIG_ENDIAN
5617 case USE:
5618 /* VAROP is a (use (mem ..)) that was made from a bit-field
5619 extraction that spanned the boundary of the MEM. If we are
5620 now masking so it is within that boundary, we don't need the
5621 USE any more. */
5622 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
5623 {
5624 varop = XEXP (varop, 0);
5625 continue;
5626 }
5627 break;
5628 #endif
5629
5630 case SUBREG:
5631 if (subreg_lowpart_p (varop)
5632 /* We can ignore the effect of this SUBREG if it narrows the mode
5633 or, on machines where byte operations extend, if the
5634 constant masks to zero all the bits the mode doesn't have. */
5635 && ((GET_MODE_SIZE (GET_MODE (varop))
5636 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
5637 #if defined(BYTE_LOADS_ZERO_EXTEND) || defined(BYTE_LOADS_SIGN_EXTEND)
5638 || (0 == (constop
5639 & GET_MODE_MASK (GET_MODE (varop))
5640 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
5641 #endif
5642 ))
5643 {
5644 varop = SUBREG_REG (varop);
5645 continue;
5646 }
5647 break;
5648
5649 case ZERO_EXTRACT:
5650 case SIGN_EXTRACT:
5651 case ZERO_EXTEND:
5652 case SIGN_EXTEND:
5653 /* Try to expand these into a series of shifts and then work
5654 with that result. If we can't, for example, if the extract
5655 isn't at a fixed position, give up. */
5656 temp = expand_compound_operation (varop);
5657 if (temp != varop)
5658 {
5659 varop = temp;
5660 continue;
5661 }
5662 break;
5663
5664 case AND:
5665 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
5666 {
5667 constop &= INTVAL (XEXP (varop, 1));
5668 varop = XEXP (varop, 0);
5669 continue;
5670 }
5671 break;
5672
5673 case IOR:
5674 case XOR:
5675 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5676 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5677 operation which may be a bitfield extraction. */
5678
5679 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5680 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5681 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5682 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
5683 && GET_CODE (XEXP (varop, 1)) == CONST_INT
5684 && ((INTVAL (XEXP (varop, 1))
5685 & ~ significant_bits (XEXP (varop, 0),
5686 GET_MODE (varop))) == 0))
5687 {
5688 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
5689 << INTVAL (XEXP (XEXP (varop, 0), 1)));
5690 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
5691 XEXP (XEXP (varop, 0), 0), temp);
5692 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5693 temp, XEXP (XEXP (varop, 0), 1));
5694 continue;
5695 }
5696
5697 /* Apply the AND to both branches of the IOR or XOR, then try to
5698 apply the distributive law. This may eliminate operations
5699 if either branch can be simplified because of the AND.
5700 It may also make some cases more complex, but those cases
5701 probably won't match a pattern either with or without this. */
5702 return
5703 gen_lowpart_for_combine
5704 (mode, apply_distributive_law
5705 (gen_rtx_combine
5706 (GET_CODE (varop), GET_MODE (varop),
5707 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5708 XEXP (varop, 0), constop),
5709 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5710 XEXP (varop, 1), constop))));
5711
5712 case NOT:
5713 /* (and (not FOO) CONST) is (and (xor FOO CONST) CONST), so if FOO
5714 is an LSHIFTRT we can do the same as above. */
5715
5716 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5717 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5718 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5719 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5720 {
5721 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
5722 temp = gen_binary (XOR, GET_MODE (varop),
5723 XEXP (XEXP (varop, 0), 0), temp);
5724 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5725 temp, XEXP (XEXP (varop, 0), 1));
5726 continue;
5727 }
5728 break;
5729
5730 case ASHIFTRT:
5731 /* If we are just looking for the sign bit, we don't need this
5732 shift at all, even if it has a variable count. */
5733 if (constop == ((HOST_WIDE_INT) 1
5734 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
5735 {
5736 varop = XEXP (varop, 0);
5737 continue;
5738 }
5739
5740 /* If this is a shift by a constant, get a mask that contains
5741 those bits that are not copies of the sign bit. We then have
5742 two cases: If CONSTOP only includes those bits, this can be
5743 a logical shift, which may allow simplifications. If CONSTOP
5744 is a single-bit field not within those bits, we are requesting
5745 a copy of the sign bit and hence can shift the sign bit to
5746 the appropriate location. */
5747 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5748 && INTVAL (XEXP (varop, 1)) >= 0
5749 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
5750 {
5751 int i = -1;
5752
5753 significant = GET_MODE_MASK (GET_MODE (varop));
5754 significant >>= INTVAL (XEXP (varop, 1));
5755
5756 if ((constop & ~significant) == 0
5757 || (i = exact_log2 (constop)) >= 0)
5758 {
5759 varop = simplify_shift_const
5760 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5761 i < 0 ? INTVAL (XEXP (varop, 1))
5762 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
5763 if (GET_CODE (varop) != ASHIFTRT)
5764 continue;
5765 }
5766 }
5767
5768 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
5769 even if the shift count isn't a constant. */
5770 if (constop == 1)
5771 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5772 XEXP (varop, 0), XEXP (varop, 1));
5773 break;
5774
5775 case NE:
5776 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5777 included in STORE_FLAG_VALUE and FOO has no significant bits
5778 not in CONST. */
5779 if ((constop & ~ STORE_FLAG_VALUE) == 0
5780 && XEXP (varop, 1) == const0_rtx
5781 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5782 {
5783 varop = XEXP (varop, 0);
5784 continue;
5785 }
5786 break;
5787
5788 case PLUS:
5789 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5790 low-order bits (as in an alignment operation) and FOO is already
5791 aligned to that boundary, we can remove this AND
5792 and possibly the PLUS if it is now adding zero. */
5793 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5794 && exact_log2 (-constop) >= 0
5795 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5796 {
5797 varop = plus_constant (XEXP (varop, 0),
5798 INTVAL (XEXP (varop, 1)) & constop);
5799 constop = ~0;
5800 break;
5801 }
5802
5803 /* ... fall through ... */
5804
5805 case MINUS:
5806 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
5807 less than powers of two and M2 is narrower than M1, we can
5808 eliminate the inner AND. This occurs when incrementing
5809 bit fields. */
5810
5811 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
5812 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
5813 SUBST (XEXP (varop, 0),
5814 expand_compound_operation (XEXP (varop, 0)));
5815
5816 if (GET_CODE (XEXP (varop, 0)) == AND
5817 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5818 && exact_log2 (constop + 1) >= 0
5819 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
5820 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
5821 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
5822 break;
5823 }
5824
5825 break;
5826 }
5827
5828 /* If we have reached a constant, this whole thing is constant. */
5829 if (GET_CODE (varop) == CONST_INT)
5830 return GEN_INT (constop & INTVAL (varop));
5831
5832 /* See what bits are significant in VAROP. */
5833 significant = significant_bits (varop, mode);
5834
5835 /* Turn off all bits in the constant that are known to already be zero.
5836 Thus, if the AND isn't needed at all, we will have CONSTOP == SIGNIFICANT
5837 which is tested below. */
5838
5839 constop &= significant;
5840
5841 /* If we don't have any bits left, return zero. */
5842 if (constop == 0)
5843 return const0_rtx;
5844
5845 /* Get VAROP in MODE; if it is not already there, try a SUBREG. Don't
5846 make a new SUBREG if we already had one (just check for the simplest cases). */
5847 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
5848 && GET_MODE (XEXP (x, 0)) == mode
5849 && SUBREG_REG (XEXP (x, 0)) == varop)
5850 varop = XEXP (x, 0);
5851 else
5852 varop = gen_lowpart_for_combine (mode, varop);
5853
5854 /* If we can't make the SUBREG, try to return what we were given. */
5855 if (GET_CODE (varop) == CLOBBER)
5856 return x ? x : varop;
5857
5858 /* If we are only masking insignificant bits, return VAROP. */
5859 if (constop == significant)
5860 x = varop;
5861
5862 /* Otherwise, return an AND. See how much, if any, of X we can use. */
5863 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
5864 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
5865
5866 else
5867 {
5868 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5869 || INTVAL (XEXP (x, 1)) != constop)
5870 SUBST (XEXP (x, 1), GEN_INT (constop));
5871
5872 SUBST (XEXP (x, 0), varop);
5873 }
5874
5875 return x;
5876 }
5877 \f
5878 /* Given an expression, X, compute which bits in X can be non-zero.
5879 We don't care about bits outside of those defined in MODE.
5880
5881 For most X this is simply GET_MODE_MASK (MODE), but if X is
5882 a shift, AND, or zero_extract, we can do better. */
5883
5884 static unsigned HOST_WIDE_INT
5885 significant_bits (x, mode)
5886 rtx x;
5887 enum machine_mode mode;
5888 {
5889 unsigned HOST_WIDE_INT significant = GET_MODE_MASK (mode);
5890 unsigned HOST_WIDE_INT inner_sig;
5891 enum rtx_code code;
5892 int mode_width = GET_MODE_BITSIZE (mode);
5893 rtx tem;
5894
5895 /* If X is wider than MODE, use its mode instead. */
5896 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
5897 {
5898 mode = GET_MODE (x);
5899 significant = GET_MODE_MASK (mode);
5900 mode_width = GET_MODE_BITSIZE (mode);
5901 }
5902
5903 if (mode_width > HOST_BITS_PER_WIDE_INT)
5904 /* Our only callers in this case look for single bit values. So
5905 just return the mode mask. Those tests will then be false. */
5906 return significant;
5907
5908 code = GET_CODE (x);
5909 switch (code)
5910 {
5911 case REG:
5912 #ifdef STACK_BOUNDARY
5913 /* If this is the stack pointer, we may know something about its
5914 alignment. If PUSH_ROUNDING is defined, it is possible for the
5915 stack to be momentarily aligned only to that amount, so we pick
5916 the least alignment. */
5917
5918 if (x == stack_pointer_rtx)
5919 {
5920 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
5921
5922 #ifdef PUSH_ROUNDING
5923 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
5924 #endif
5925
5926 return significant & ~ (sp_alignment - 1);
5927 }
5928 #endif
5929
5930 /* If X is a register whose value we can find, use that value.
5931 Otherwise, use the previously-computed significant bits for this
5932 register. */
5933
5934 tem = get_last_value (x);
5935 if (tem)
5936 return significant_bits (tem, mode);
5937 else if (significant_valid && reg_significant[REGNO (x)])
5938 return reg_significant[REGNO (x)] & significant;
5939 else
5940 return significant;
5941
5942 case CONST_INT:
5943 return INTVAL (x);
5944
5945 #ifdef BYTE_LOADS_ZERO_EXTEND
5946 case MEM:
5947 /* In many, if not most, RISC machines, reading a byte from memory
5948 zeros the rest of the register. Noticing that fact saves a lot
5949 of extra zero-extends. */
5950 significant &= GET_MODE_MASK (GET_MODE (x));
5951 break;
5952 #endif
5953
5954 #if STORE_FLAG_VALUE == 1
5955 case EQ: case NE:
5956 case GT: case GTU:
5957 case LT: case LTU:
5958 case GE: case GEU:
5959 case LE: case LEU:
5960
5961 if (GET_MODE_CLASS (mode) == MODE_INT)
5962 significant = 1;
5963
5964 /* A comparison operation only sets the bits given by its mode. The
5965 rest are set undefined. */
5966 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5967 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5968 break;
5969 #endif
5970
5971 case NEG:
5972 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5973 == GET_MODE_BITSIZE (GET_MODE (x)))
5974 significant = 1;
5975
5976 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5977 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5978 break;
5979
5980 case ABS:
5981 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5982 == GET_MODE_BITSIZE (GET_MODE (x)))
5983 significant = 1;
5984 break;
5985
5986 case TRUNCATE:
5987 significant &= (significant_bits (XEXP (x, 0), mode)
5988 & GET_MODE_MASK (mode));
5989 break;
5990
5991 case ZERO_EXTEND:
5992 significant &= significant_bits (XEXP (x, 0), mode);
5993 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
5994 significant &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
5995 break;
5996
5997 case SIGN_EXTEND:
5998 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
5999 Otherwise, show that all the bits in the outer mode but not in the
6000 inner mode may be non-zero. */
6001 inner_sig = significant_bits (XEXP (x, 0), mode);
6002 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6003 {
6004 inner_sig &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6005 if (inner_sig &
6006 (((HOST_WIDE_INT) 1
6007 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6008 inner_sig |= (GET_MODE_MASK (mode)
6009 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6010 }
6011
6012 significant &= inner_sig;
6013 break;
6014
6015 case AND:
6016 significant &= (significant_bits (XEXP (x, 0), mode)
6017 & significant_bits (XEXP (x, 1), mode));
6018 break;
6019
6020 case XOR: case IOR:
6021 case UMIN: case UMAX: case SMIN: case SMAX:
6022 significant &= (significant_bits (XEXP (x, 0), mode)
6023 | significant_bits (XEXP (x, 1), mode));
6024 break;
6025
6026 case PLUS: case MINUS:
6027 case MULT:
6028 case DIV: case UDIV:
6029 case MOD: case UMOD:
6030 /* We can apply the rules of arithmetic to compute the number of
6031 high- and low-order zero bits of these operations. We start by
6032 computing the width (position of the highest-order non-zero bit)
6033 and the number of low-order zero bits for each value. */
6034 {
6035 unsigned HOST_WIDE_INT sig0 = significant_bits (XEXP (x, 0), mode);
6036 unsigned HOST_WIDE_INT sig1 = significant_bits (XEXP (x, 1), mode);
6037 int width0 = floor_log2 (sig0) + 1;
6038 int width1 = floor_log2 (sig1) + 1;
6039 int low0 = floor_log2 (sig0 & -sig0);
6040 int low1 = floor_log2 (sig1 & -sig1);
6041 int op0_maybe_minusp = ((sig0 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
6042 int op1_maybe_minusp = ((sig1 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
6043 int result_width = mode_width;
6044 int result_low = 0;
6045
6046 switch (code)
6047 {
6048 case PLUS:
6049 result_width = MAX (width0, width1) + 1;
6050 result_low = MIN (low0, low1);
6051 break;
6052 case MINUS:
6053 result_low = MIN (low0, low1);
6054 break;
6055 case MULT:
6056 result_width = width0 + width1;
6057 result_low = low0 + low1;
6058 break;
6059 case DIV:
6060 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6061 result_width = width0;
6062 break;
6063 case UDIV:
6064 result_width = width0;
6065 break;
6066 case MOD:
6067 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6068 result_width = MIN (width0, width1);
6069 result_low = MIN (low0, low1);
6070 break;
6071 case UMOD:
6072 result_width = MIN (width0, width1);
6073 result_low = MIN (low0, low1);
6074 break;
6075 }
6076
6077 if (result_width < mode_width)
6078 significant &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6079
6080 if (result_low > 0)
6081 significant &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6082 }
6083 break;
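/* As a worked case of the arithmetic above: if SIG0 == 0xf0 (width 8,
   four low-order zeros) and SIG1 == 0x0c (width 4, two low-order
   zeros), a MULT can have non-zero bits only in positions 6 through
   11, so SIGNIFICANT is narrowed to 0xfc0. */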
6084
6085 case ZERO_EXTRACT:
6086 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6087 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6088 significant &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6089 break;
6090
6091 case SUBREG:
6092 /* If this is a SUBREG formed for a promoted variable that has
6093 been zero-extended, we know that at least the high-order bits
6094 are zero, though others might be too. */
6095
6096 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6097 significant = (GET_MODE_MASK (GET_MODE (x))
6098 & significant_bits (SUBREG_REG (x), GET_MODE (x)));
6099
6100 /* If the inner mode is a single word for both the host and target
6101 machines, we can compute this from which bits of the inner
6102 object are known significant. */
6103 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
6104 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6105 <= HOST_BITS_PER_WIDE_INT))
6106 {
6107 significant &= significant_bits (SUBREG_REG (x), mode);
6108 #if ! defined(BYTE_LOADS_ZERO_EXTEND) && ! defined(BYTE_LOADS_SIGN_EXTEND)
6109 /* On many CISC machines, accessing an object in a wider mode
6110 causes the high-order bits to become undefined. So they are
6111 not known to be zero. */
6112 if (GET_MODE_SIZE (GET_MODE (x))
6113 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6114 significant |= (GET_MODE_MASK (GET_MODE (x))
6115 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
6116 #endif
6117 }
6118 break;
6119
6120 case ASHIFTRT:
6121 case LSHIFTRT:
6122 case ASHIFT:
6123 case LSHIFT:
6124 case ROTATE:
6125 /* The significant bits are in two classes: any bits within MODE
6126 that aren't in GET_MODE (x) are always significant. The rest of the
6127 significant bits are those that are significant in the operand of
6128 the shift when shifted the appropriate number of bits. This
6129 shows that high-order bits are cleared by the right shift and
6130 low-order bits by left shifts. */
6131 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6132 && INTVAL (XEXP (x, 1)) >= 0
6133 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6134 {
6135 enum machine_mode inner_mode = GET_MODE (x);
6136 int width = GET_MODE_BITSIZE (inner_mode);
6137 int count = INTVAL (XEXP (x, 1));
6138 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6139 unsigned HOST_WIDE_INT op_significant
6140 = significant_bits (XEXP (x, 0), mode);
6141 unsigned HOST_WIDE_INT inner = op_significant & mode_mask;
6142 unsigned HOST_WIDE_INT outer = 0;
6143
6144 if (mode_width > width)
6145 outer = (op_significant & significant & ~ mode_mask);
6146
6147 if (code == LSHIFTRT)
6148 inner >>= count;
6149 else if (code == ASHIFTRT)
6150 {
6151 inner >>= count;
6152
6153 /* If the sign bit was significant before the shift, we need
6154 to mark all the places it could have been copied to by the
6155 shift as significant. */
6156 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6157 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
6158 }
6159 else if (code == LSHIFT || code == ASHIFT)
6160 inner <<= count;
6161 else
6162 inner = ((inner << (count % width)
6163 | (inner >> (width - (count % width)))) & mode_mask);
6164
6165 significant &= (outer | inner);
6166 }
6167 break;
6168
6169 case FFS:
6170 /* This is at most the number of bits in the mode. */
6171 significant = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
6172 break;
6173
6174 case IF_THEN_ELSE:
6175 significant &= (significant_bits (XEXP (x, 1), mode)
6176 | significant_bits (XEXP (x, 2), mode));
6177 break;
6178 }
6179
6180 return significant;
6181 }
6182 \f
6183 /* Return the number of bits at the high-order end of X that are known to
6184 be equal to the sign bit. This number will always be between 1 and
6185 the number of bits in the mode of X. MODE is the mode to be used
6186 if X is VOIDmode. */
6187
6188 static int
6189 num_sign_bit_copies (x, mode)
6190 rtx x;
6191 enum machine_mode mode;
6192 {
6193 enum rtx_code code = GET_CODE (x);
6194 int bitwidth;
6195 int num0, num1, result;
6196 unsigned HOST_WIDE_INT sig;
6197 rtx tem;
6198
6199 /* If we weren't given a mode, use the mode of X. If the mode is still
6200 VOIDmode, we don't know anything. */
6201
6202 if (mode == VOIDmode)
6203 mode = GET_MODE (x);
6204
6205 if (mode == VOIDmode)
6206 return 0;
6207
6208 bitwidth = GET_MODE_BITSIZE (mode);
6209
6210 switch (code)
6211 {
6212 case REG:
6213 if (significant_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6214 return reg_sign_bit_copies[REGNO (x)];
6215
6216 tem = get_last_value (x);
6217 if (tem != 0)
6218 return num_sign_bit_copies (tem, mode);
6219 break;
6220
6221 #ifdef BYTE_LOADS_SIGN_EXTEND
6222 case MEM:
6223 /* Some RISC machines sign-extend all loads of smaller than a word. */
6224 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
6225 #endif
6226
6227 case CONST_INT:
6228 /* If the constant is negative, take its 1's complement and remask.
6229 Then see how many zero bits we have. */
6230 sig = INTVAL (x) & GET_MODE_MASK (mode);
6231 if (sig & ((HOST_WIDE_INT) 1 << (bitwidth - 1)))
6232 sig = (~ sig) & GET_MODE_MASK (mode);
6233
6234 return (sig == 0 ? bitwidth : bitwidth - floor_log2 (sig) - 1);
6235
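/* For example, (const_int -4) in SImode: the complemented and masked
   value is 3, floor_log2 (3) == 1, and 32 - 1 - 1 == 30, since bits
   31 through 2 are all copies of the sign bit. */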
6236 case SUBREG:
6237 /* If this is a SUBREG for a promoted object that is sign-extended
6238 and we are looking at it in a wider mode, we know that at least the
6239 high-order bits are known to be sign bit copies. */
6240
6241 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
6242 return (GET_MODE_BITSIZE (mode) - GET_MODE_BITSIZE (GET_MODE (x))
6243 + num_sign_bit_copies (SUBREG_REG (x), GET_MODE (x)));
6244
6245 /* For a smaller object, just ignore the high bits. */
6246 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
6247 {
6248 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
6249 return MAX (1, (num0
6250 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6251 - bitwidth)));
6252 }
6253
6254 #if defined(BYTE_LOADS_ZERO_EXTEND) || defined(BYTE_LOADS_SIGN_EXTEND)
6255 /* For paradoxical SUBREGs, just look inside since, on machines with
6256 one of these defined, we assume that operations are actually
6257 performed on the full register. Note that we are passing MODE
6258 to the recursive call, so the number of sign bit copies will
6259 remain relative to that mode, not the inner mode. */
6260
6261 if (GET_MODE_SIZE (GET_MODE (x))
6262 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6263 return num_sign_bit_copies (SUBREG_REG (x), mode);
6264 #endif
6265
6266 break;
6267
6268 case SIGN_EXTRACT:
6269 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6270 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
6271 break;
6272
6273 case SIGN_EXTEND:
6274 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6275 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
6276
6277 case TRUNCATE:
6278 /* For a smaller object, just ignore the high bits. */
6279 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
6280 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6281 - bitwidth)));
6282
6283 case NOT:
6284 return num_sign_bit_copies (XEXP (x, 0), mode);
6285
6286 case ROTATE: case ROTATERT:
6287 /* If we are rotating left by a number of bits less than the number
6288 of sign bit copies, we can just subtract that amount from the
6289 number. */
6290 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6291 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
6292 {
6293 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6294 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
6295 : bitwidth - INTVAL (XEXP (x, 1))));
6296 }
6297 break;
6298
6299 case NEG:
6300 /* In general, this subtracts one sign bit copy. But if the value
6301 is known to be positive, the number of sign bit copies is the
6302 same as that of the input. Finally, if the input has just one
6303 significant bit, all the bits are copies of the sign bit. */
6304 sig = significant_bits (XEXP (x, 0), mode);
6305 if (sig == 1)
6306 return bitwidth;
6307
6308 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6309 if (num0 > 1
6310 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig))
6311 num0--;
6312
6313 return num0;
6314
6315 case IOR: case AND: case XOR:
6316 case SMIN: case SMAX: case UMIN: case UMAX:
6317 /* Logical operations will preserve the number of sign-bit copies.
6318 MIN and MAX operations always return one of the operands. */
6319 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6320 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6321 return MIN (num0, num1);
6322
6323 case PLUS: case MINUS:
6324 /* For addition and subtraction, we can have a 1-bit carry. However,
6325 if we are subtracting 1 from a positive number, there will not
6326 be such a carry. Furthermore, if the positive number is known to
6327 be 0 or 1, we know the result is either -1 or 0. */
6328
6329 if (code == PLUS && XEXP (x, 1) == constm1_rtx)
6330 {
6331 sig = significant_bits (XEXP (x, 0), mode);
6332 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig) == 0)
6333 return (sig == 1 || sig == 0 ? bitwidth
6334 : bitwidth - floor_log2 (sig));
6335 }
6336
6337 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6338 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6339 return MAX (1, MIN (num0, num1) - 1);
6340
6341 case MULT:
6342 /* The number of bits of the product is the sum of the number of
6343 bits of both terms. However, unless one of the terms is known
6344 to be positive, we must allow for an additional bit since negating
6345 a negative number can remove one sign bit copy. */
6346
6347 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6348 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6349
6350 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
6351 if (result > 0
6352 && ((significant_bits (XEXP (x, 0), mode)
6353 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6354 && ((significant_bits (XEXP (x, 1), mode)
6355 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
6356 result--;
6357
6358 return MAX (1, result);
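      /* For example, two SImode operands with 25 sign bit copies apiece
         are 8-bit quantities, so RESULT = 32 - 7 - 7 = 18. If both can be
         negative, (-128) * (-128) = 16384 costs one more bit, and the test
         above reduces this to 17.  */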
6359
6360 case UDIV:
6361 /* The result must be <= the first operand. */
6362 return num_sign_bit_copies (XEXP (x, 0), mode);
6363
6364 case UMOD:
6365 /* The result must be <= the second operand. */
6366 return num_sign_bit_copies (XEXP (x, 1), mode);
6367
6368 case DIV:
6369 /* Similar to unsigned division, except that we have to worry about
6370 the case where the divisor is negative, in which case we have
6371 to add 1. */
6372 result = num_sign_bit_copies (XEXP (x, 0), mode);
6373 if (result > 1
6374 && (significant_bits (XEXP (x, 1), mode)
6375 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6376 result--;
6377
6378 return result;
6379
6380 case MOD:
6381 result = num_sign_bit_copies (XEXP (x, 1), mode);
6382 if (result > 1
6383 && (significant_bits (XEXP (x, 1), mode)
6384 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6385 result--;
6386
6387 return result;
6388
6389 case ASHIFTRT:
6390 /* Shifts by a constant add to the number of bits equal to the
6391 sign bit. */
6392 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6393 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6394 && INTVAL (XEXP (x, 1)) > 0)
6395 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
6396
6397 return num0;
6398
6399 case ASHIFT:
6400 case LSHIFT:
6401 /* Left shifts destroy copies. */
6402 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6403 || INTVAL (XEXP (x, 1)) < 0
6404 || INTVAL (XEXP (x, 1)) >= bitwidth)
6405 return 1;
6406
6407 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6408 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
6409
6410 case IF_THEN_ELSE:
6411 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
6412 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
6413 return MIN (num0, num1);
6414
6415 #if STORE_FLAG_VALUE == -1
6416 case EQ: case NE: case GE: case GT: case LE: case LT:
6417 case GEU: case GTU: case LEU: case LTU:
6418 return bitwidth;
6419 #endif
6420 }
6421
6422 /* If we haven't been able to figure it out by one of the above rules,
6423 see if some of the high-order bits are known to be zero. If so,
6424 count those bits and return one less than that amount. */
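      /* For example, if the significant bits are 0x7f in SImode, bits 7
         through 31 are known zero, so there are 32 - 6 - 1 = 25 leading
         copies of the (zero) sign bit.  */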
6425
6426 sig = significant_bits (x, mode);
6427 return (sig & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) ? 1 : bitwidth - floor_log2 (sig) - 1);
6428 }
6429 \f
6430 /* Return the number of "extended" bits there are in X, when interpreted
6431 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
6432 unsigned quantities, this is the number of high-order zero bits.
6433 For signed quantities, this is the number of copies of the sign bit
6434 minus 1. In both cases, this function returns the number of "spare"
6435 bits. For example, if two quantities for which this function returns
6436 at least 1 are added, the addition is known not to overflow.
6437
6438 This function will always return 0 unless called during combine, which
6439 implies that it must be called from a define_split. */
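/* For example, during combine, extended_count of (and:SI X (const_int 255))
   with UNSIGNEDP nonzero is 31 - floor_log2 (255) = 24: the 24 high-order
   bits are known to be zero.  */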
6440
6441 int
6442 extended_count (x, mode, unsignedp)
6443 rtx x;
6444 enum machine_mode mode;
6445 int unsignedp;
6446 {
6447 if (significant_valid == 0)
6448 return 0;
6449
6450 return (unsignedp
6451 ? (GET_MODE_BITSIZE (mode) - 1
6452 - floor_log2 (significant_bits (x, mode)))
6453 : num_sign_bit_copies (x, mode) - 1);
6454 }
6455 \f
6456 /* This function is called from `simplify_shift_const' to merge two
6457 outer operations. Specifically, we have already found that we need
6458 to perform operation *POP0 with constant *PCONST0 at the outermost
6459 position. We would now like to also perform OP1 with constant CONST1
6460 (with *POP0 being done last).
6461
6462 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
6463 the resulting operation. *PCOMP_P is set to 1 if we would need to
6464 complement the innermost operand, otherwise it is unchanged.
6465
6466 MODE is the mode in which the operation will be done. No bits outside
6467 the width of this mode matter. It is assumed that the width of this mode
6468 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
6469
6470 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
6471 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
6472 result is simply *PCONST0.
6473
6474 If the resulting operation cannot be expressed as one operation, we
6475 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
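/* For example, merging an outer AND of 0x0f with an inner IOR of 0x0f uses
   the identity (a | b) & b == b, so *POP0 becomes SET and the result is
   simply the constant 0x0f.  */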
6476
6477 static int
6478 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
6479 enum rtx_code *pop0;
6480 HOST_WIDE_INT *pconst0;
6481 enum rtx_code op1;
6482 HOST_WIDE_INT const1;
6483 enum machine_mode mode;
6484 int *pcomp_p;
6485 {
6486 enum rtx_code op0 = *pop0;
6487 HOST_WIDE_INT const0 = *pconst0;
6488
6489 const0 &= GET_MODE_MASK (mode);
6490 const1 &= GET_MODE_MASK (mode);
6491
6492 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6493 if (op0 == AND)
6494 const1 &= const0;
6495
6496 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6497 if OP0 is SET. */
6498
6499 if (op1 == NIL || op0 == SET)
6500 return 1;
6501
6502 else if (op0 == NIL)
6503 op0 = op1, const0 = const1;
6504
6505 else if (op0 == op1)
6506 {
6507 switch (op0)
6508 {
6509 case AND:
6510 const0 &= const1;
6511 break;
6512 case IOR:
6513 const0 |= const1;
6514 break;
6515 case XOR:
6516 const0 ^= const1;
6517 break;
6518 case PLUS:
6519 const0 += const1;
6520 break;
6521 case NEG:
6522 op0 = NIL;
6523 break;
6524 }
6525 }
6526
6527 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6528 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6529 return 0;
6530
6531 /* If the two constants aren't the same, we can't do anything. The
6532 remaining six cases can all be done. */
6533 else if (const0 != const1)
6534 return 0;
6535
6536 else
6537 switch (op0)
6538 {
6539 case IOR:
6540 if (op1 == AND)
6541 /* (a & b) | b == b */
6542 op0 = SET;
6543 else /* op1 == XOR */
6544 /* (a ^ b) | b == a | b */
6545 ;
6546 break;
6547
6548 case XOR:
6549 if (op1 == AND)
6550 /* (a & b) ^ b == (~a) & b */
6551 op0 = AND, *pcomp_p = 1;
6552 else /* op1 == IOR */
6553 /* (a | b) ^ b == a & ~b */
6554 op0 = AND, *pconst0 = ~ const0;
6555 break;
6556
6557 case AND:
6558 if (op1 == IOR)
6559 /* (a | b) & b == b */
6560 op0 = SET;
6561 else /* op1 == XOR */
6562 /* (a ^ b) & b == (~a) & b */
6563 *pcomp_p = 1;
6564 break;
6565 }
6566
6567 /* Check for NO-OP cases. */
6568 const0 &= GET_MODE_MASK (mode);
6569 if (const0 == 0
6570 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6571 op0 = NIL;
6572 else if (const0 == 0 && op0 == AND)
6573 op0 = SET;
6574 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6575 op0 = NIL;
6576
6577 *pop0 = op0;
6578 *pconst0 = const0;
6579
6580 return 1;
6581 }
6582 \f
6583 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6584 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6585 that we started with.
6586
6587 The shift is normally computed in the widest mode we find in VAROP, as
6588 long as it isn't a different number of words than RESULT_MODE. Exceptions
6589 are ASHIFTRT and ROTATE, which are always done in their original mode. */
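/* For example, given (lshiftrt:SI (lshiftrt:SI X 2) 3), the loop below adds
   the two counts to form (lshiftrt:SI X 5) and records an outer AND with
   mask 0x07ffffff, which simplify_and_const_int can then discard as
   redundant.  */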
6590
6591 static rtx
6592 simplify_shift_const (x, code, result_mode, varop, count)
6593 rtx x;
6594 enum rtx_code code;
6595 enum machine_mode result_mode;
6596 rtx varop;
6597 int count;
6598 {
6599 enum rtx_code orig_code = code;
6600 int orig_count = count;
6601 enum machine_mode mode = result_mode;
6602 enum machine_mode shift_mode, tmode;
6603 int mode_words
6604 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6605 /* We form (outer_op (code varop count) (outer_const)). */
6606 enum rtx_code outer_op = NIL;
6607 HOST_WIDE_INT outer_const;
6608 rtx const_rtx;
6609 int complement_p = 0;
6610 rtx new;
6611
6612 /* If we were given an invalid count, don't do anything except exactly
6613 what was requested. */
6614
6615 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6616 {
6617 if (x)
6618 return x;
6619
6620 return gen_rtx (code, mode, varop, GEN_INT (count));
6621 }
6622
6623 /* Unless one of the branches of the `if' in this loop does a `continue',
6624 we will `break' the loop after the `if'. */
6625
6626 while (count != 0)
6627 {
6628 /* If we have an operand of (clobber (const_int 0)), just return that
6629 value. */
6630 if (GET_CODE (varop) == CLOBBER)
6631 return varop;
6632
6633 /* If we discovered we had to complement VAROP, leave. Making a NOT
6634 here would cause an infinite loop. */
6635 if (complement_p)
6636 break;
6637
6638 /* Convert ROTATERT to ROTATE. */
6639 if (code == ROTATERT)
6640 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
6641
6642 /* Canonicalize LSHIFT to ASHIFT. */
6643 if (code == LSHIFT)
6644 code = ASHIFT;
6645
6646 /* We need to determine what mode we will do the shift in. If the
6647 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
6648 was originally done in. Otherwise, we can do it in MODE, the widest
6649 mode encountered. */
6650 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6651
6652 /* Handle cases where the count is greater than the size of the mode
6653 minus 1. For ASHIFT, use the size minus one as the count (this can
6654 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6655 take the count modulo the size. For other shifts, the result is
6656 zero.
6657
6658 Since these shifts are being produced by the compiler by combining
6659 multiple operations, each of which is defined, we know what the
6660 result is supposed to be. */
6661
6662 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
6663 {
6664 if (code == ASHIFTRT)
6665 count = GET_MODE_BITSIZE (shift_mode) - 1;
6666 else if (code == ROTATE || code == ROTATERT)
6667 count %= GET_MODE_BITSIZE (shift_mode);
6668 else
6669 {
6670 /* We can't simply return zero because there may be an
6671 outer op. */
6672 varop = const0_rtx;
6673 count = 0;
6674 break;
6675 }
6676 }
6677
6678 /* Negative counts are invalid and should not have been made (a
6679 programmer-specified negative count should have been handled
6680 above). */
6681 else if (count < 0)
6682 abort ();
6683
6684 /* An arithmetic right shift of a quantity known to be -1 or 0
6685 is a no-op. */
6686 if (code == ASHIFTRT
6687 && (num_sign_bit_copies (varop, shift_mode)
6688 == GET_MODE_BITSIZE (shift_mode)))
6689 {
6690 count = 0;
6691 break;
6692 }
6693
6694 /* We simplify the tests below and elsewhere by converting
6695 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
6696 `make_compound_operation' will convert it to an ASHIFTRT for
6697 those machines (such as Vax) that don't have a LSHIFTRT. */
6698 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
6699 && code == ASHIFTRT
6700 && ((significant_bits (varop, shift_mode)
6701 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
6702 == 0))
6703 code = LSHIFTRT;
6704
6705 switch (GET_CODE (varop))
6706 {
6707 case SIGN_EXTEND:
6708 case ZERO_EXTEND:
6709 case SIGN_EXTRACT:
6710 case ZERO_EXTRACT:
6711 new = expand_compound_operation (varop);
6712 if (new != varop)
6713 {
6714 varop = new;
6715 continue;
6716 }
6717 break;
6718
6719 case MEM:
6720 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
6721 minus the width of a smaller mode, we can do this with a
6722 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
6723 if ((code == ASHIFTRT || code == LSHIFTRT)
6724 && ! mode_dependent_address_p (XEXP (varop, 0))
6725 && ! MEM_VOLATILE_P (varop)
6726 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6727 MODE_INT, 1)) != BLKmode)
6728 {
6729 #if BYTES_BIG_ENDIAN
6730 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
6731 #else
6732 new = gen_rtx (MEM, tmode,
6733 plus_constant (XEXP (varop, 0),
6734 count / BITS_PER_UNIT));
6735 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
6736 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
6737 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
6738 #endif
6739 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6740 : ZERO_EXTEND, mode, new);
6741 count = 0;
6742 continue;
6743 }
6744 break;
6745
6746 case USE:
6747 /* Similar to the case above, except that we can only do this if
6748 the resulting mode is the same as that of the underlying
6749 MEM and adjust the address depending on the *bits* endianness
6750 because of the way that bit-field extract insns are defined. */
6751 if ((code == ASHIFTRT || code == LSHIFTRT)
6752 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6753 MODE_INT, 1)) != BLKmode
6754 && tmode == GET_MODE (XEXP (varop, 0)))
6755 {
6756 #if BITS_BIG_ENDIAN
6757 new = XEXP (varop, 0);
6758 #else
6759 new = copy_rtx (XEXP (varop, 0));
6760 SUBST (XEXP (new, 0),
6761 plus_constant (XEXP (new, 0),
6762 count / BITS_PER_UNIT));
6763 #endif
6764
6765 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6766 : ZERO_EXTEND, mode, new);
6767 count = 0;
6768 continue;
6769 }
6770 break;
6771
6772 case SUBREG:
6773 /* If VAROP is a SUBREG, strip it as long as the inner operand has
6774 the same number of words as what we've seen so far. Then store
6775 the widest mode in MODE. */
6776 if (SUBREG_WORD (varop) == 0
6777 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6778 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6779 == mode_words))
6780 {
6781 varop = SUBREG_REG (varop);
6782 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
6783 mode = GET_MODE (varop);
6784 continue;
6785 }
6786 break;
6787
6788 case MULT:
6789 /* Some machines use MULT instead of ASHIFT because MULT
6790 is cheaper. But it is still better on those machines to
6791 merge two shifts into one. */
6792 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6793 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6794 {
6795 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
6796 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6797 continue;
6798 }
6799 break;
6800
6801 case UDIV:
6802 /* Similar, for when divides are cheaper. */
6803 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6804 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6805 {
6806 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6807 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6808 continue;
6809 }
6810 break;
6811
6812 case ASHIFTRT:
6813 /* If we are extracting just the sign bit of an arithmetic right
6814 shift, that shift is not needed. */
6815 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
6816 {
6817 varop = XEXP (varop, 0);
6818 continue;
6819 }
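          /* For example, (lshiftrt:SI (ashiftrt:SI X 5) 31) extracts only
             the sign bit, which the inner shift does not move, so it is
             simplified to (lshiftrt:SI X 31).  */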
6820
6821 /* ... fall through ... */
6822
6823 case LSHIFTRT:
6824 case ASHIFT:
6825 case LSHIFT:
6826 case ROTATE:
6827 /* Here we have two nested shifts. The result is usually the
6828 AND of a new shift with a mask. We compute the result below. */
6829 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6830 && INTVAL (XEXP (varop, 1)) >= 0
6831 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
6832 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
6833 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6834 {
6835 enum rtx_code first_code = GET_CODE (varop);
6836 int first_count = INTVAL (XEXP (varop, 1));
6837 unsigned HOST_WIDE_INT mask;
6838 rtx mask_rtx;
6839 rtx inner;
6840
6841 if (first_code == LSHIFT)
6842 first_code = ASHIFT;
6843
6844 /* We have one common special case. We can't do any merging if
6845 the inner code is an ASHIFTRT of a smaller mode. However, if
6846 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
6847 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
6848 we can convert it to
6849 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
6850 This simplifies certain SIGN_EXTEND operations. */
6851 if (code == ASHIFT && first_code == ASHIFTRT
6852 && (GET_MODE_BITSIZE (result_mode)
6853 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
6854 {
6855 /* C3 has the low-order C1 bits zero. */
6856
6857 mask = (GET_MODE_MASK (mode)
6858 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
6859
6860 varop = simplify_and_const_int (NULL_RTX, result_mode,
6861 XEXP (varop, 0), mask);
6862 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
6863 varop, count);
6864 count = first_count;
6865 code = ASHIFTRT;
6866 continue;
6867 }
6868
6869 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
6870 than C1 high-order bits equal to the sign bit, we can convert
6871 this to either an ASHIFT or an ASHIFTRT depending on the
6872 two counts.
6873
6874 We cannot do this if VAROP's mode is not SHIFT_MODE. */
6875
6876 if (code == ASHIFTRT && first_code == ASHIFT
6877 && GET_MODE (varop) == shift_mode
6878 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
6879 > first_count))
6880 {
6881 count -= first_count;
6882 if (count < 0)
6883 count = - count, code = ASHIFT;
6884 varop = XEXP (varop, 0);
6885 continue;
6886 }
6887
6888 /* There are some cases we can't do. If CODE is ASHIFTRT,
6889 we can only do this if FIRST_CODE is also ASHIFTRT.
6890
6891 We can't do the case when CODE is ROTATE and FIRST_CODE is
6892 ASHIFTRT.
6893
6894 If the mode of this shift is not the mode of the outer shift,
6895 we can't do this if either shift is ASHIFTRT or ROTATE.
6896
6897 Finally, we can't do any of these if the mode is too wide
6898 unless the codes are the same.
6899
6900 Handle the case where the shift codes are the same
6901 first. */
6902
6903 if (code == first_code)
6904 {
6905 if (GET_MODE (varop) != result_mode
6906 && (code == ASHIFTRT || code == ROTATE))
6907 break;
6908
6909 count += first_count;
6910 varop = XEXP (varop, 0);
6911 continue;
6912 }
6913
6914 if (code == ASHIFTRT
6915 || (code == ROTATE && first_code == ASHIFTRT)
6916 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
6917 || (GET_MODE (varop) != result_mode
6918 && (first_code == ASHIFTRT || first_code == ROTATE
6919 || code == ROTATE)))
6920 break;
6921
6922 /* To compute the mask to apply after the shift, shift the
6923 significant bits of the inner shift the same way the
6924 outer shift will. */
6925
6926 mask_rtx = GEN_INT (significant_bits (varop, GET_MODE (varop)));
6927
6928 mask_rtx
6929 = simplify_binary_operation (code, result_mode, mask_rtx,
6930 GEN_INT (count));
6931
6932 /* Give up if we can't compute an outer operation to use. */
6933 if (mask_rtx == 0
6934 || GET_CODE (mask_rtx) != CONST_INT
6935 || ! merge_outer_ops (&outer_op, &outer_const, AND,
6936 INTVAL (mask_rtx),
6937 result_mode, &complement_p))
6938 break;
6939
6940 /* If the shifts are in the same direction, we add the
6941 counts. Otherwise, we subtract them. */
6942 if ((code == ASHIFTRT || code == LSHIFTRT)
6943 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
6944 count += first_count;
6945 else
6946 count -= first_count;
6947
6948 /* If COUNT is positive, the new shift is usually CODE,
6949 except for the two exceptions below, in which case it is
6950 FIRST_CODE. If the count is negative, FIRST_CODE should
6951 always be used. */
6952 if (count > 0
6953 && ((first_code == ROTATE && code == ASHIFT)
6954 || (first_code == ASHIFTRT && code == LSHIFTRT)))
6955 code = first_code;
6956 else if (count < 0)
6957 code = first_code, count = - count;
6958
6959 varop = XEXP (varop, 0);
6960 continue;
6961 }
6962
6963 /* If we have (A << B << C) for any shift, we can convert this to
6964 (A << C << B). This wins if A is a constant. Only try this if
6965 B is not a constant. */
6966
6967 else if (GET_CODE (varop) == code
6968 && GET_CODE (XEXP (varop, 1)) != CONST_INT
6969 && 0 != (new
6970 = simplify_binary_operation (code, mode,
6971 XEXP (varop, 0),
6972 GEN_INT (count))))
6973 {
6974 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
6975 count = 0;
6976 continue;
6977 }
6978 break;
6979
6980 case NOT:
6981 /* Make this fit the case below. */
6982 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
6983 GEN_INT (GET_MODE_MASK (mode)));
6984 continue;
6985
6986 case IOR:
6987 case AND:
6988 case XOR:
6989 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
6990 with C the size of VAROP - 1 and the shift is logical if
6991 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
6992 we have an (le X 0) operation. If we have an arithmetic shift
6993 and STORE_FLAG_VALUE is 1 or we have a logical shift with
6994 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
6995
6996 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
6997 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
6998 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6999 && (code == LSHIFTRT || code == ASHIFTRT)
7000 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7001 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7002 {
7003 count = 0;
7004 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7005 const0_rtx);
7006
7007 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7008 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7009
7010 continue;
7011 }
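          /* For example, with STORE_FLAG_VALUE == 1,
             (lshiftrt:SI (ior:SI (plus:SI X (const_int -1)) X) 31)
             computes X <= 0, so it becomes (le:SI X (const_int 0)).  */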
7012
7013 /* If we have (shift (logical)), move the logical to the outside
7014 to allow it to possibly combine with another logical and the
7015 shift to combine with another shift. This also canonicalizes to
7016 what a ZERO_EXTRACT looks like. Also, some machines have
7017 (and (shift)) insns. */
7018
7019 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7020 && (new = simplify_binary_operation (code, result_mode,
7021 XEXP (varop, 1),
7022 GEN_INT (count))) != 0
7023 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7024 INTVAL (new), result_mode, &complement_p))
7025 {
7026 varop = XEXP (varop, 0);
7027 continue;
7028 }
7029
7030 /* If we can't do that, try to simplify the shift in each arm of the
7031 logical expression, make a new logical expression, and apply
7032 the inverse distributive law. */
7033 {
7034 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
7035 XEXP (varop, 0), count);
7036 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
7037 XEXP (varop, 1), count);
7038
7039 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
7040 varop = apply_distributive_law (varop);
7041
7042 count = 0;
7043 }
7044 break;
7045
7046 case EQ:
7047 /* Convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7048 says that the sign bit can be tested, FOO has mode MODE, C is
7049 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
7050 significant. */
7051 if (code == LSHIFT
7052 && XEXP (varop, 1) == const0_rtx
7053 && GET_MODE (XEXP (varop, 0)) == result_mode
7054 && count == GET_MODE_BITSIZE (result_mode) - 1
7055 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7056 && ((STORE_FLAG_VALUE
7057 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
7058 && significant_bits (XEXP (varop, 0), result_mode) == 1
7059 && merge_outer_ops (&outer_op, &outer_const, XOR,
7060 (HOST_WIDE_INT) 1, result_mode,
7061 &complement_p))
7062 {
7063 varop = XEXP (varop, 0);
7064 count = 0;
7065 continue;
7066 }
7067 break;
7068
7069 case NEG:
7070 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7071 than the number of bits in the mode is equivalent to A. */
7072 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7073 && significant_bits (XEXP (varop, 0), result_mode) == 1)
7074 {
7075 varop = XEXP (varop, 0);
7076 count = 0;
7077 continue;
7078 }
7079
7080 /* NEG commutes with ASHIFT since it is multiplication. Move the
7081 NEG outside to allow shifts to combine. */
7082 if (code == ASHIFT
7083 && merge_outer_ops (&outer_op, &outer_const, NEG,
7084 (HOST_WIDE_INT) 0, result_mode,
7085 &complement_p))
7086 {
7087 varop = XEXP (varop, 0);
7088 continue;
7089 }
7090 break;
7091
7092 case PLUS:
7093 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7094 is one less than the number of bits in the mode is
7095 equivalent to (xor A 1). */
7096 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7097 && XEXP (varop, 1) == constm1_rtx
7098 && significant_bits (XEXP (varop, 0), result_mode) == 1
7099 && merge_outer_ops (&outer_op, &outer_const, XOR,
7100 (HOST_WIDE_INT) 1, result_mode,
7101 &complement_p))
7102 {
7103 count = 0;
7104 varop = XEXP (varop, 0);
7105 continue;
7106 }
7107
7108 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
7109 significant in BAR are those being shifted out and those
7110 bits are known zero in FOO, we can replace the PLUS with FOO.
7111 Similarly in the other operand order. This code occurs when
7112 we are computing the size of a variable-size array. */
7113
7114 if ((code == ASHIFTRT || code == LSHIFTRT)
7115 && count < HOST_BITS_PER_WIDE_INT
7116 && significant_bits (XEXP (varop, 1), result_mode) >> count == 0
7117 && (significant_bits (XEXP (varop, 1), result_mode)
7118 & significant_bits (XEXP (varop, 0), result_mode)) == 0)
7119 {
7120 varop = XEXP (varop, 0);
7121 continue;
7122 }
7123 else if ((code == ASHIFTRT || code == LSHIFTRT)
7124 && count < HOST_BITS_PER_WIDE_INT
7125 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
7126 >> count)
7127 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
7128 & significant_bits (XEXP (varop, 1),
7129 result_mode)))
7130 {
7131 varop = XEXP (varop, 1);
7132 continue;
7133 }
7134
7135 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7136 if (code == ASHIFT
7137 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7138 && (new = simplify_binary_operation (ASHIFT, result_mode,
7139 XEXP (varop, 1),
7140 GEN_INT (count))) != 0
7141 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7142 INTVAL (new), result_mode, &complement_p))
7143 {
7144 varop = XEXP (varop, 0);
7145 continue;
7146 }
7147 break;
7148
7149 case MINUS:
7150 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
7151 with C the size of VAROP - 1 and the shift is logical if
7152 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7153 we have a (gt X 0) operation. If the shift is arithmetic with
7154 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7155 we have a (neg (gt X 0)) operation. */
7156
7157 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7158 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7159 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7160 && (code == LSHIFTRT || code == ASHIFTRT)
7161 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7162 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7163 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7164 {
7165 count = 0;
7166 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7167 const0_rtx);
7168
7169 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7170 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7171
7172 continue;
7173 }
7174 break;
7175 }
7176
7177 break;
7178 }
7179
7180 /* We need to determine what mode to do the shift in. If the shift is
7181 a ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7182 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7183 The code we care about is that of the shift that will actually be done,
7184 not the shift that was originally requested. */
7185 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7186
7187 /* We have now finished analyzing the shift. The result should be
7188 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7189 OUTER_OP is non-NIL, it is an operation that needs to be applied
7190 to the result of the shift. OUTER_CONST is the relevant constant,
7191 but we must turn off all bits turned off in the shift.
7192
7193 If we were passed a value for X, see if we can use any pieces of
7194 it. If not, make new rtx. */
7195
7196 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7197 && GET_CODE (XEXP (x, 1)) == CONST_INT
7198 && INTVAL (XEXP (x, 1)) == count)
7199 const_rtx = XEXP (x, 1);
7200 else
7201 const_rtx = GEN_INT (count);
7202
7203 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7204 && GET_MODE (XEXP (x, 0)) == shift_mode
7205 && SUBREG_REG (XEXP (x, 0)) == varop)
7206 varop = XEXP (x, 0);
7207 else if (GET_MODE (varop) != shift_mode)
7208 varop = gen_lowpart_for_combine (shift_mode, varop);
7209
7210 /* If we can't make the SUBREG, try to return what we were given. */
7211 if (GET_CODE (varop) == CLOBBER)
7212 return x ? x : varop;
7213
7214 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
7215 if (new != 0)
7216 x = new;
7217 else
7218 {
7219 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
7220 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
7221
7222 SUBST (XEXP (x, 0), varop);
7223 SUBST (XEXP (x, 1), const_rtx);
7224 }
7225
7226 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
7227 turn off all the bits that the shift would have turned off. */
7228 if (orig_code == LSHIFTRT && result_mode != shift_mode)
7229 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
7230 GET_MODE_MASK (result_mode) >> orig_count);
7231
7232 /* Do the remainder of the processing in RESULT_MODE. */
7233 x = gen_lowpart_for_combine (result_mode, x);
7234
7235 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
7236 operation. */
7237 if (complement_p)
7238 x = gen_unary (NOT, result_mode, x);
7239
7240 if (outer_op != NIL)
7241 {
7242 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7243 outer_const &= GET_MODE_MASK (result_mode);
7244
7245 if (outer_op == AND)
7246 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
7247 else if (outer_op == SET)
7248 /* This means that we have determined that the result is
7249 equivalent to a constant. This should be rare. */
7250 x = GEN_INT (outer_const);
7251 else if (GET_RTX_CLASS (outer_op) == '1')
7252 x = gen_unary (outer_op, result_mode, x);
7253 else
7254 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
7255 }
7256
7257 return x;
7258 }
7259 \f
7260 /* Like recog, but we receive the address of a pointer to a new pattern.
7261 We try to match the rtx that the pointer points to.
7262 If that fails, we may try to modify or replace the pattern,
7263 storing the replacement into the same pointer object.
7264
7265 Modifications include deletion or addition of CLOBBERs.
7266
7267 PNOTES is a pointer to a location where any REG_UNUSED notes added for
7268 the CLOBBERs are placed.
7269
7270 The value is the final insn code from the pattern ultimately matched,
7271 or -1. */
7272
7273 static int
7274 recog_for_combine (pnewpat, insn, pnotes)
7275 rtx *pnewpat;
7276 rtx insn;
7277 rtx *pnotes;
7278 {
7279 register rtx pat = *pnewpat;
7280 int insn_code_number;
7281 int num_clobbers_to_add = 0;
7282 int i;
7283 rtx notes = 0;
7284
7285 /* Is the result of combination a valid instruction? */
7286 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7287
7288 /* If it isn't, there is the possibility that we previously had an insn
7289 that clobbered some register as a side effect, but the combined
7290 insn doesn't need to do that. So try once more without the clobbers
7291 unless this represents an ASM insn. */
7292
7293 if (insn_code_number < 0 && ! check_asm_operands (pat)
7294 && GET_CODE (pat) == PARALLEL)
7295 {
7296 int pos;
7297
7298 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
7299 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
7300 {
7301 if (i != pos)
7302 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
7303 pos++;
7304 }
7305
7306 SUBST_INT (XVECLEN (pat, 0), pos);
7307
7308 if (pos == 1)
7309 pat = XVECEXP (pat, 0, 0);
7310
7311 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7312 }
7313
7314 /* If we had any clobbers to add, make a new pattern that contains
7315 them. Then check to make sure that all of them are dead. */
7316 if (num_clobbers_to_add)
7317 {
7318 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
7319 gen_rtvec (GET_CODE (pat) == PARALLEL
7320 ? XVECLEN (pat, 0) + num_clobbers_to_add
7321 : num_clobbers_to_add + 1));
7322
7323 if (GET_CODE (pat) == PARALLEL)
7324 for (i = 0; i < XVECLEN (pat, 0); i++)
7325 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
7326 else
7327 XVECEXP (newpat, 0, 0) = pat;
7328
7329 add_clobbers (newpat, insn_code_number);
7330
7331 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
7332 i < XVECLEN (newpat, 0); i++)
7333 {
7334 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
7335 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
7336 return -1;
7337 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
7338 XEXP (XVECEXP (newpat, 0, i), 0), notes);
7339 }
7340 pat = newpat;
7341 }
7342
7343 *pnewpat = pat;
7344 *pnotes = notes;
7345
7346 return insn_code_number;
7347 }
7348 \f
7349 /* Like gen_lowpart but for use by combine. In combine it is not possible
7350 to create any new pseudoregs. However, it is safe to create
7351 invalid memory addresses, because combine will try to recognize
7352 them and all they will do is make the combine attempt fail.
7353
7354 If for some reason this cannot do its job, an rtx
7355 (clobber (const_int 0)) is returned.
7356 An insn containing that will not be recognized. */
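/* For example, asking for the QImode low part of (reg:SI 65) typically
   yields (subreg:QI (reg:SI 65) 0) via gen_lowpart_common, while a
   volatile MEM yields (clobber (const_int 0)) and the combination fails.  */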
7357
7358 #undef gen_lowpart
7359
7360 static rtx
7361 gen_lowpart_for_combine (mode, x)
7362 enum machine_mode mode;
7363 register rtx x;
7364 {
7365 rtx result;
7366
7367 if (GET_MODE (x) == mode)
7368 return x;
7369
7370 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
7371 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7372
7373 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
7374 won't know what to do. So we will strip off the SUBREG here and
7375 process normally. */
7376 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
7377 {
7378 x = SUBREG_REG (x);
7379 if (GET_MODE (x) == mode)
7380 return x;
7381 }
7382
7383 result = gen_lowpart_common (mode, x);
7384 if (result)
7385 return result;
7386
7387 if (GET_CODE (x) == MEM)
7388 {
7389 register int offset = 0;
7390 rtx new;
7391
7392 /* Refuse to work on a volatile memory ref or one with a mode-dependent
7393 address. */
7394 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
7395 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7396
7397 /* If we want to refer to something bigger than the original memref,
7398 generate a perverse subreg instead. That will force a reload
7399 of the original memref X. */
7400 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
7401 return gen_rtx (SUBREG, mode, x, 0);
7402
7403 #if WORDS_BIG_ENDIAN
7404 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
7405 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
7406 #endif
7407 #if BYTES_BIG_ENDIAN
7408 /* Adjust the address so that the address-after-the-data
7409 is unchanged. */
7410 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
7411 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
7412 #endif
7413 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
7414 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
7415 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
7416 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
7417 return new;
7418 }
7419
7420 /* If X is a comparison operator, rewrite it in a new mode. This
7421 probably won't match, but may allow further simplifications. */
7422 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
7423 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
7424
7425 /* If we couldn't simplify X any other way, just enclose it in a
7426 SUBREG. Normally, this SUBREG won't match, but some patterns may
7427 include an explicit SUBREG or we may simplify it further in combine. */
7428 else
7429 {
7430 int word = 0;
7431
7432 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
7433 word = ((GET_MODE_SIZE (GET_MODE (x))
7434 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
7435 / UNITS_PER_WORD);
7436 return gen_rtx (SUBREG, mode, x, word);
7437 }
7438 }
7439 \f
7440 /* Make an rtx expression. This is a subset of gen_rtx and only supports
7441 expressions of 1, 2, or 3 operands, each of which is an rtx expression.
7442
7443 If the identical expression was previously in the insn (in the undobuf),
7444 it will be returned. Only if it is not found will a new expression
7445 be made. */
7446
7447 /*VARARGS2*/
7448 static rtx
7449 gen_rtx_combine (va_alist)
7450 va_dcl
7451 {
7452 va_list p;
7453 enum rtx_code code;
7454 enum machine_mode mode;
7455 int n_args;
7456 rtx args[3];
7457 int i, j;
7458 char *fmt;
7459 rtx rt;
7460
7461 va_start (p);
7462 code = va_arg (p, enum rtx_code);
7463 mode = va_arg (p, enum machine_mode);
7464 n_args = GET_RTX_LENGTH (code);
7465 fmt = GET_RTX_FORMAT (code);
7466
7467 if (n_args == 0 || n_args > 3)
7468 abort ();
7469
7470 /* Get each arg and verify that it is supposed to be an expression. */
7471 for (j = 0; j < n_args; j++)
7472 {
7473 if (*fmt++ != 'e')
7474 abort ();
7475
7476 args[j] = va_arg (p, rtx);
7477 }
7478
7479 /* See if this is in undobuf. Be sure we don't use objects that came
7480 from another insn; this could produce circular rtl structures. */
7481
7482 for (i = previous_num_undos; i < undobuf.num_undo; i++)
7483 if (!undobuf.undo[i].is_int
7484 && GET_CODE (undobuf.undo[i].old_contents.rtx) == code
7485 && GET_MODE (undobuf.undo[i].old_contents.rtx) == mode)
7486 {
7487 for (j = 0; j < n_args; j++)
7488 if (XEXP (undobuf.undo[i].old_contents.rtx, j) != args[j])
7489 break;
7490
7491 if (j == n_args)
7492 return undobuf.undo[i].old_contents.rtx;
7493 }
7494
7495 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7496 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7497 rt = rtx_alloc (code);
7498 PUT_MODE (rt, mode);
7499 XEXP (rt, 0) = args[0];
7500 if (n_args > 1)
7501 {
7502 XEXP (rt, 1) = args[1];
7503 if (n_args > 2)
7504 XEXP (rt, 2) = args[2];
7505 }
7506 return rt;
7507 }
7508
7509 /* These routines make binary and unary operations by first seeing if they
7510 fold; if not, a new expression is allocated. */
7511
7512 static rtx
7513 gen_binary (code, mode, op0, op1)
7514 enum rtx_code code;
7515 enum machine_mode mode;
7516 rtx op0, op1;
7517 {
7518 rtx result;
7519 rtx tem;
7520
7521 if (GET_RTX_CLASS (code) == 'c'
7522 && (GET_CODE (op0) == CONST_INT
7523 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
7524 tem = op0, op0 = op1, op1 = tem;
7525
7526 if (GET_RTX_CLASS (code) == '<')
7527 {
7528 enum machine_mode op_mode = GET_MODE (op0);
7529 if (op_mode == VOIDmode)
7530 op_mode = GET_MODE (op1);
7531 result = simplify_relational_operation (code, op_mode, op0, op1);
7532 }
7533 else
7534 result = simplify_binary_operation (code, mode, op0, op1);
7535
7536 if (result)
7537 return result;
7538
7539 /* Put complex operands first and constants second. */
7540 if (GET_RTX_CLASS (code) == 'c'
7541 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
7542 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
7543 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
7544 || (GET_CODE (op0) == SUBREG
7545 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
7546 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
7547 return gen_rtx_combine (code, mode, op1, op0);
7548
7549 return gen_rtx_combine (code, mode, op0, op1);
7550 }
7551
7552 static rtx
7553 gen_unary (code, mode, op0)
7554 enum rtx_code code;
7555 enum machine_mode mode;
7556 rtx op0;
7557 {
7558 rtx result = simplify_unary_operation (code, mode, op0, mode);
7559
7560 if (result)
7561 return result;
7562
7563 return gen_rtx_combine (code, mode, op0);
7564 }
7565 \f
7566 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
7567 comparison code that will be tested.
7568
7569 The result is a possibly different comparison code to use. *POP0 and
7570 *POP1 may be updated.
7571
7572 It is possible that we might detect that a comparison is either always
7573 true or always false. However, we do not perform general constant
7574 folding in combine, so this knowledge isn't useful. Such tautologies
7575 should have been detected earlier. Hence we ignore all such cases. */
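/* For example, (gtu X (const_int 0)) is canonicalized below to
   (ne X (const_int 0)), and in SImode (ltu X (const_int 0x80000000))
   becomes (ge X (const_int 0)).  */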
7576
7577 static enum rtx_code
7578 simplify_comparison (code, pop0, pop1)
7579 enum rtx_code code;
7580 rtx *pop0;
7581 rtx *pop1;
7582 {
7583 rtx op0 = *pop0;
7584 rtx op1 = *pop1;
7585 rtx tem, tem1;
7586 int i;
7587 enum machine_mode mode, tmode;
7588
7589 /* Try a few ways of applying the same transformation to both operands. */
7590 while (1)
7591 {
7592 /* If both operands are the same kind of shift by the same constant,
7593 see if we can ignore the shift. We can if the shift is a rotate or if
7594 the bits shifted out are not significant for either input and if the
7595 type of comparison is compatible with the shift. */
7596 if (GET_CODE (op0) == GET_CODE (op1)
7597 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7598 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
7599 || ((GET_CODE (op0) == LSHIFTRT
7600 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7601 && (code != GT && code != LT && code != GE && code != LE))
7602 || (GET_CODE (op0) == ASHIFTRT
7603 && (code != GTU && code != LTU
7604 && code != GEU && code != LEU)))
7605 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7606 && INTVAL (XEXP (op0, 1)) >= 0
7607 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7608 && XEXP (op0, 1) == XEXP (op1, 1))
7609 {
7610 enum machine_mode mode = GET_MODE (op0);
7611 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7612 int shift_count = INTVAL (XEXP (op0, 1));
7613
7614 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
7615 mask &= (mask >> shift_count) << shift_count;
7616 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7617 mask = (mask & (mask << shift_count)) >> shift_count;
7618
7619 if ((significant_bits (XEXP (op0, 0), mode) & ~ mask) == 0
7620 && (significant_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
7621 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
7622 else
7623 break;
7624 }
7625
7626 /* If both operands are AND's of a paradoxical SUBREG by constant, the
7627 SUBREGs are of the same mode, and, in both cases, the AND would
7628 be redundant if the comparison was done in the narrower mode,
7629 do the comparison in the narrower mode (e.g., we are AND'ing with 1
7630 and the operand's significant bits are 0xffffff01; in that case if
7631 we only care about QImode, we don't need the AND). This case occurs
7632 if the output mode of an scc insn is not SImode and
7633 STORE_FLAG_VALUE == 1 (e.g., the 386). */
7634
7635 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
7636 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7637 && GET_CODE (XEXP (op1, 1)) == CONST_INT
7638 && GET_CODE (XEXP (op0, 0)) == SUBREG
7639 && GET_CODE (XEXP (op1, 0)) == SUBREG
7640 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
7641 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
7642 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
7643 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7644 && (significant_bits (SUBREG_REG (XEXP (op0, 0)),
7645 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7646 & ~ INTVAL (XEXP (op0, 1))) == 0
7647 && (significant_bits (SUBREG_REG (XEXP (op1, 0)),
7648 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7649 & ~ INTVAL (XEXP (op1, 1))) == 0)
7650 {
7651 op0 = SUBREG_REG (XEXP (op0, 0));
7652 op1 = SUBREG_REG (XEXP (op1, 0));
7653
7654 /* The resulting comparison is always unsigned since we masked off
7655 the original sign bit. */
7656 code = unsigned_condition (code);
7657 }
7658 else
7659 break;
7660 }
7661
7662 /* If the first operand is a constant, swap the operands and adjust the
7663 comparison code appropriately. */
7664 if (CONSTANT_P (op0))
7665 {
7666 tem = op0, op0 = op1, op1 = tem;
7667 code = swap_condition (code);
7668 }
7669
7670 /* We now enter a loop during which we will try to simplify the comparison.
7671 For the most part, we only are concerned with comparisons with zero,
7672 but some things may really be comparisons with zero but not start
7673 out looking that way. */
7674
7675 while (GET_CODE (op1) == CONST_INT)
7676 {
7677 enum machine_mode mode = GET_MODE (op0);
7678 int mode_width = GET_MODE_BITSIZE (mode);
7679 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7680 int equality_comparison_p;
7681 int sign_bit_comparison_p;
7682 int unsigned_comparison_p;
7683 HOST_WIDE_INT const_op;
7684
7685 /* We only want to handle integral modes. This catches VOIDmode,
7686 CCmode, and the floating-point modes. An exception is that we
7687 can handle VOIDmode if OP0 is a COMPARE or a comparison
7688 operation. */
7689
7690 if (GET_MODE_CLASS (mode) != MODE_INT
7691 && ! (mode == VOIDmode
7692 && (GET_CODE (op0) == COMPARE
7693 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
7694 break;
7695
7696 /* Get the constant we are comparing against and turn off all bits
7697 not on in our mode. */
7698 const_op = INTVAL (op1);
7699 if (mode_width <= HOST_BITS_PER_WIDE_INT)
7700 const_op &= mask;
7701
7702 /* If we are comparing against a constant power of two and the value
7703 being compared has only that single significant bit (e.g., it was
7704 `and'ed with that bit), we can replace this with a comparison
7705 with zero. */
7706 if (const_op
7707 && (code == EQ || code == NE || code == GE || code == GEU
7708 || code == LT || code == LTU)
7709 && mode_width <= HOST_BITS_PER_WIDE_INT
7710 && exact_log2 (const_op) >= 0
7711 && significant_bits (op0, mode) == const_op)
7712 {
7713 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
7714 op1 = const0_rtx, const_op = 0;
7715 }
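      /* For example, (eq (and:SI X (const_int 8)) (const_int 8)) has
         significant bits exactly 8, so it becomes
         (ne (and:SI X (const_int 8)) (const_int 0)).  */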
7716
7717 /* Similarly, if we are comparing a value known to be either -1 or
7718 0 with -1, change it to the opposite comparison against zero. */
7719
7720 if (const_op == -1
7721 && (code == EQ || code == NE || code == GT || code == LE
7722 || code == GEU || code == LTU)
7723 && num_sign_bit_copies (op0, mode) == mode_width)
7724 {
7725 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
7726 op1 = const0_rtx, const_op = 0;
7727 }
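      /* For example, if OP0 is known to be -1 or 0, (eq OP0 (const_int -1))
         becomes (ne OP0 (const_int 0)).  */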
7728
7729 /* Do some canonicalizations based on the comparison code. We prefer
7730 comparisons against zero and then prefer equality comparisons.
7731 If we can reduce the size of a constant, we will do that too. */
7732
7733 switch (code)
7734 {
7735 case LT:
7736 /* < C is equivalent to <= (C - 1) */
7737 if (const_op > 0)
7738 {
7739 const_op -= 1;
7740 op1 = GEN_INT (const_op);
7741 code = LE;
7742 /* ... fall through to LE case below. */
7743 }
7744 else
7745 break;
7746
7747 case LE:
7748 /* <= C is equivalent to < (C + 1); we do this for C < 0. */
7749 if (const_op < 0)
7750 {
7751 const_op += 1;
7752 op1 = GEN_INT (const_op);
7753 code = LT;
7754 }
7755
7756 /* If we are doing a <= 0 comparison on a value known to have
7757 a zero sign bit, we can replace this with == 0. */
7758 else if (const_op == 0
7759 && mode_width <= HOST_BITS_PER_WIDE_INT
7760 && (significant_bits (op0, mode)
7761 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7762 code = EQ;
7763 break;
7764
7765 case GE:
7766 /* >= C is equivalent to > (C - 1). */
7767 if (const_op > 0)
7768 {
7769 const_op -= 1;
7770 op1 = GEN_INT (const_op);
7771 code = GT;
7772 /* ... fall through to GT below. */
7773 }
7774 else
7775 break;
7776
7777 case GT:
7778 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
7779 if (const_op < 0)
7780 {
7781 const_op += 1;
7782 op1 = GEN_INT (const_op);
7783 code = GE;
7784 }
7785
7786 /* If we are doing a > 0 comparison on a value known to have
7787 a zero sign bit, we can replace this with != 0. */
7788 else if (const_op == 0
7789 && mode_width <= HOST_BITS_PER_WIDE_INT
7790 && (significant_bits (op0, mode)
7791 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7792 code = NE;
7793 break;
7794
7795 case LTU:
7796 /* < C is equivalent to <= (C - 1). */
7797 if (const_op > 0)
7798 {
7799 const_op -= 1;
7800 op1 = GEN_INT (const_op);
7801 code = LEU;
7802 /* ... fall through ... */
7803 }
7804
7805 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
7806 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7807 {
7808 const_op = 0, op1 = const0_rtx;
7809 code = GE;
7810 break;
7811 }
7812 else
7813 break;
7814
7815 case LEU:
7816 /* unsigned <= 0 is equivalent to == 0 */
7817 if (const_op == 0)
7818 code = EQ;
7819
7820 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
7821 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7822 {
7823 const_op = 0, op1 = const0_rtx;
7824 code = GE;
7825 }
7826 break;
7827
7828 case GEU:
7829 /* >= C is equivalent to > (C - 1). */
7830 if (const_op > 1)
7831 {
7832 const_op -= 1;
7833 op1 = GEN_INT (const_op);
7834 code = GTU;
7835 /* ... fall through ... */
7836 }
7837
7838 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
7839 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7840 {
7841 const_op = 0, op1 = const0_rtx;
7842 code = LT;
7843 }
7844 else
7845 break;
7846
7847 case GTU:
7848 /* unsigned > 0 is equivalent to != 0 */
7849 if (const_op == 0)
7850 code = NE;
7851
7852 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
7853 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7854 {
7855 const_op = 0, op1 = const0_rtx;
7856 code = LT;
7857 }
7858 break;
7859 }
7860
7861 /* Compute some predicates to simplify code below. */
7862
7863 equality_comparison_p = (code == EQ || code == NE);
7864 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
7865 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
7866 || code == LEU);
7867
7868 /* Now try cases based on the opcode of OP0. If none of the cases
7869 does a "continue", we exit this loop immediately after the
7870 switch. */
7871
7872 switch (GET_CODE (op0))
7873 {
7874 case ZERO_EXTRACT:
7875 /* If we are extracting a single bit from a variable position in
7876 a constant that has only a single bit set and are comparing it
7877 with zero, we can convert this into an equality comparison
7878 between the position and the location of the single bit. We can't
7879 do this if bits are big-endian and we don't have an extzv, since
7880 we then can't know what mode to use for the endianness adjustment. */
7881
7882 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
7883 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
7884 && XEXP (op0, 1) == const1_rtx
7885 && equality_comparison_p && const_op == 0
7886 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
7887 {
7888 #if BITS_BIG_ENDIAN
7889 i = (GET_MODE_BITSIZE
7890 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
7891 #endif
7892
7893 op0 = XEXP (op0, 2);
7894 op1 = GEN_INT (i);
7895 const_op = i;
7896
7897 /* Result is nonzero iff shift count is equal to I. */
7898 code = reverse_condition (code);
7899 continue;
7900 }
7901 #endif
7902
7903 /* ... fall through ... */
7904
7905 case SIGN_EXTRACT:
7906 tem = expand_compound_operation (op0);
7907 if (tem != op0)
7908 {
7909 op0 = tem;
7910 continue;
7911 }
7912 break;
7913
7914 case NOT:
7915 /* If testing for equality, we can take the NOT of the constant. */
7916 if (equality_comparison_p
7917 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
7918 {
7919 op0 = XEXP (op0, 0);
7920 op1 = tem;
7921 continue;
7922 }
7923
7924 /* If just looking at the sign bit, reverse the sense of the
7925 comparison. */
7926 if (sign_bit_comparison_p)
7927 {
7928 op0 = XEXP (op0, 0);
7929 code = (code == GE ? LT : GE);
7930 continue;
7931 }
7932 break;
7933
7934 case NEG:
7935 /* If testing for equality, we can take the NEG of the constant. */
7936 if (equality_comparison_p
7937 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
7938 {
7939 op0 = XEXP (op0, 0);
7940 op1 = tem;
7941 continue;
7942 }
7943
7944 /* The remaining cases only apply to comparisons with zero. */
7945 if (const_op != 0)
7946 break;
7947
7948 /* When X is ABS or is known positive,
7949 (neg X) is < 0 if and only if X != 0. */
7950
7951 if (sign_bit_comparison_p
7952 && (GET_CODE (XEXP (op0, 0)) == ABS
7953 || (mode_width <= HOST_BITS_PER_WIDE_INT
7954 && (significant_bits (XEXP (op0, 0), mode)
7955 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
7956 {
7957 op0 = XEXP (op0, 0);
7958 code = (code == LT ? NE : EQ);
7959 continue;
7960 }
7961
7962 /* If we have NEG of something that is the result of a
7963 SIGN_EXTEND, SIGN_EXTRACT, or ASHIFTRT, we know that the
7964 two high-order bits must be the same and hence that
7965 "(-a) < 0" is equivalent to "a > 0". Otherwise, we can't
7966 do this. */
7967 if (GET_CODE (XEXP (op0, 0)) == SIGN_EXTEND
7968 || (GET_CODE (XEXP (op0, 0)) == SIGN_EXTRACT
7969 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7970 && (INTVAL (XEXP (XEXP (op0, 0), 1))
7971 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (op0, 0), 0)))))
7972 || (GET_CODE (XEXP (op0, 0)) == ASHIFTRT
7973 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7974 && XEXP (XEXP (op0, 0), 1) != const0_rtx)
7975 || ((tem = get_last_value (XEXP (op0, 0))) != 0
7976 && (GET_CODE (tem) == SIGN_EXTEND
7977 || (GET_CODE (tem) == SIGN_EXTRACT
7978 && GET_CODE (XEXP (tem, 1)) == CONST_INT
7979 && (INTVAL (XEXP (tem, 1))
7980 < GET_MODE_BITSIZE (GET_MODE (XEXP (tem, 0)))))
7981 || (GET_CODE (tem) == ASHIFTRT
7982 && GET_CODE (XEXP (tem, 1)) == CONST_INT
7983 && XEXP (tem, 1) != const0_rtx))))
7984 {
7985 op0 = XEXP (op0, 0);
7986 code = swap_condition (code);
7987 continue;
7988 }
7989 break;
7990
7991 case ROTATE:
7992 /* If we are testing equality and our count is a constant, we
7993 can perform the inverse operation on our RHS. */
7994 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7995 && (tem = simplify_binary_operation (ROTATERT, mode,
7996 op1, XEXP (op0, 1))) != 0)
7997 {
7998 op0 = XEXP (op0, 0);
7999 op1 = tem;
8000 continue;
8001 }
8002
8003 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8004 a particular bit. Convert it to an AND of a constant of that
8005 bit. This will be converted into a ZERO_EXTRACT. */
8006 if (const_op == 0 && sign_bit_comparison_p
8007 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8008 && mode_width <= HOST_BITS_PER_WIDE_INT)
8009 {
8010 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8011 ((HOST_WIDE_INT) 1
8012 << (mode_width - 1
8013 - INTVAL (XEXP (op0, 1)))));
8014 code = (code == LT ? NE : EQ);
8015 continue;
8016 }
8017
8018 /* ... fall through ... */
8019
8020 case ABS:
8021 /* ABS is ignorable inside an equality comparison with zero. */
8022 if (const_op == 0 && equality_comparison_p)
8023 {
8024 op0 = XEXP (op0, 0);
8025 continue;
8026 }
8027 break;
8028
8029
8030 case SIGN_EXTEND:
8031 /* Can simplify (compare (zero/sign_extend FOO) CONST)
8032 to (compare FOO CONST) if CONST fits in FOO's mode and we
8033 are either testing inequality or have an unsigned comparison
8034 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
8035 if (! unsigned_comparison_p
8036 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8037 <= HOST_BITS_PER_WIDE_INT)
8038 && ((unsigned HOST_WIDE_INT) const_op
8039 < (((HOST_WIDE_INT) 1
8040 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
8041 {
8042 op0 = XEXP (op0, 0);
8043 continue;
8044 }
8045 break;
8046
8047 case SUBREG:
8048 /* Check for the case where we are comparing A - C1 with C2,
8049 both constants are smaller than 1/2 the maximum positive
8050 value in MODE, and the comparison is equality or unsigned.
8051 In that case, if A is either zero-extended to MODE or has
8052 sufficient sign bits so that the high-order bit in MODE
8053 is a copy of the sign in the inner mode, we can prove that it is
8054 safe to do the operation in the wider mode. This simplifies
8055 many range checks. */
8056
8057 if (mode_width <= HOST_BITS_PER_WIDE_INT
8058 && subreg_lowpart_p (op0)
8059 && GET_CODE (SUBREG_REG (op0)) == PLUS
8060 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
8061 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
8062 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
8063 < GET_MODE_MASK (mode) / 2)
8064 && (unsigned) const_op < GET_MODE_MASK (mode) / 2
8065 && (0 == (significant_bits (XEXP (SUBREG_REG (op0), 0),
8066 GET_MODE (SUBREG_REG (op0)))
8067 & ~ GET_MODE_MASK (mode))
8068 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
8069 GET_MODE (SUBREG_REG (op0)))
8070 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8071 - GET_MODE_BITSIZE (mode)))))
8072 {
8073 op0 = SUBREG_REG (op0);
8074 continue;
8075 }
8076
8077 /* If the inner mode is narrower and we are extracting the low part,
8078 we can treat the SUBREG as if it were a ZERO_EXTEND. */
8079 if (subreg_lowpart_p (op0)
8080 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
8081 /* Fall through */ ;
8082 else
8083 break;
8084
8085 /* ... fall through ... */
8086
8087 case ZERO_EXTEND:
8088 if ((unsigned_comparison_p || equality_comparison_p)
8089 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8090 <= HOST_BITS_PER_WIDE_INT)
8091 && ((unsigned HOST_WIDE_INT) const_op
8092 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
8093 {
8094 op0 = XEXP (op0, 0);
8095 continue;
8096 }
8097 break;
8098
8099 case PLUS:
8100 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
8101 this for equality comparisons due to pathological cases involving
8102 overflows. */
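/* Illustration: (eq (plus X (const_int 4)) (const_int 7)) becomes
   (eq X (const_int 3)).  */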
8103 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8104 && (tem = simplify_binary_operation (MINUS, mode, op1,
8105 XEXP (op0, 1))) != 0)
8106 {
8107 op0 = XEXP (op0, 0);
8108 op1 = tem;
8109 continue;
8110 }
8111
8112 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
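/* This holds because, in wraparound arithmetic, (abs X) - 1 is
   negative only when (abs X) is zero, i.e., only when X is zero.  */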
8113 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
8114 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
8115 {
8116 op0 = XEXP (XEXP (op0, 0), 0);
8117 code = (code == LT ? EQ : NE);
8118 continue;
8119 }
8120 break;
8121
8122 case MINUS:
8123 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
8124 of bits in X minus 1, is one iff X > 0. */
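/* To see this: the ASHIFTRT term is 0 when X >= 0 and -1 when X < 0,
   so the difference is -X (negative exactly for X > 0) in the first
   case and -1 - X (never negative for X < 0) in the second.  */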
8125 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8126 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8127 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
8128 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8129 {
8130 op0 = XEXP (op0, 1);
8131 code = (code == GE ? LE : GT);
8132 continue;
8133 }
8134 break;
8135
8136 case XOR:
8137 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
8138 if C is zero or B is a constant. */
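/* Illustration: (eq (xor A (const_int 5)) (const_int 3)) becomes
   (eq A (const_int 6)), since 5 ^ 3 == 6.  */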
8139 if (equality_comparison_p
8140 && 0 != (tem = simplify_binary_operation (XOR, mode,
8141 XEXP (op0, 1), op1)))
8142 {
8143 op0 = XEXP (op0, 0);
8144 op1 = tem;
8145 continue;
8146 }
8147 break;
8148
8149 case EQ: case NE:
8150 case LT: case LTU: case LE: case LEU:
8151 case GT: case GTU: case GE: case GEU:
8152 /* We can't do anything if OP0 is a condition code value, rather
8153 than an actual data value. */
8154 if (const_op != 0
8155 #ifdef HAVE_cc0
8156 || XEXP (op0, 0) == cc0_rtx
8157 #endif
8158 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
8159 break;
8160
8161 /* Get the two operands being compared. */
8162 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
8163 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
8164 else
8165 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
8166
8167 /* Check for the cases where we simply want the result of the
8168 earlier test or the opposite of that result. */
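/* E.g., (ne (eq A B) (const_int 0)) is just (eq A B), and
   (eq (ltu A B) (const_int 0)), when LTU is reversible, is
   (geu A B).  */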
8169 if (code == NE
8170 || (code == EQ && reversible_comparison_p (op0))
8171 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8172 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8173 && (STORE_FLAG_VALUE
8174 & (((HOST_WIDE_INT) 1
8175 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
8176 && (code == LT
8177 || (code == GE && reversible_comparison_p (op0)))))
8178 {
8179 code = (code == LT || code == NE
8180 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
8181 op0 = tem, op1 = tem1;
8182 continue;
8183 }
8184 break;
8185
8186 case IOR:
8187 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
8188 iff X <= 0. */
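/* To see this: for X > 0, both X - 1 and X have a clear sign bit;
   for X == 0, X - 1 is -1; and for X < 0, X itself has the sign
   bit set.  */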
8189 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
8190 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
8191 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8192 {
8193 op0 = XEXP (op0, 1);
8194 code = (code == GE ? GT : LE);
8195 continue;
8196 }
8197 break;
8198
8199 case AND:
8200 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
8201 will be converted to a ZERO_EXTRACT later. */
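/* Illustration: (eq (and (ashift (const_int 1) X) Y) (const_int 0))
   tests bit X of Y, which (and (lshiftrt Y X) (const_int 1)) tests
   as well.  */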
8202 if (const_op == 0 && equality_comparison_p
8203 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
8204 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
8205 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
8206 {
8207 op0 = simplify_and_const_int
8208 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
8209 XEXP (op0, 1),
8210 XEXP (XEXP (op0, 0), 1)),
8211 (HOST_WIDE_INT) 1);
8212 continue;
8213 }
8214
8215 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
8216 zero and X is a comparison and C1 and C2 describe only bits set
8217 in STORE_FLAG_VALUE, we can compare with X. */
8218 if (const_op == 0 && equality_comparison_p
8219 && mode_width <= HOST_BITS_PER_WIDE_INT
8220 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8221 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
8222 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8223 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
8224 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8225 {
8226 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8227 << INTVAL (XEXP (XEXP (op0, 0), 1)));
8228 if ((~ STORE_FLAG_VALUE & mask) == 0
8229 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
8230 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
8231 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
8232 {
8233 op0 = XEXP (XEXP (op0, 0), 0);
8234 continue;
8235 }
8236 }
8237
8238 /* If we are doing an equality comparison of an AND of a bit equal
8239 to the sign bit, replace this with a LT or GE comparison of
8240 the underlying value. */
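/* Illustration: with mode_width == 8, (eq (and X (const_int 128))
   (const_int 0)) becomes (ge X (const_int 0)), and the NE form
   becomes (lt X (const_int 0)).  */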
8241 if (equality_comparison_p
8242 && const_op == 0
8243 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8244 && mode_width <= HOST_BITS_PER_WIDE_INT
8245 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8246 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
8247 {
8248 op0 = XEXP (op0, 0);
8249 code = (code == EQ ? GE : LT);
8250 continue;
8251 }
8252
8253 /* If this AND operation is really a ZERO_EXTEND from a narrower
8254 mode, the constant fits within that mode, and this is either an
8255 equality or unsigned comparison, try to do this comparison in
8256 the narrower mode. */
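/* Illustration: (eq (and:SI X (const_int 255)) (const_int 7)) can be
   done as a QImode comparison of the low part of X against 7.  */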
8257 if ((equality_comparison_p || unsigned_comparison_p)
8258 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8259 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
8260 & GET_MODE_MASK (mode))
8261 + 1)) >= 0
8262 && const_op >> i == 0
8263 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
8264 {
8265 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
8266 continue;
8267 }
8268 break;
8269
8270 case ASHIFT:
8271 case LSHIFT:
8272 /* If we have (compare (xshift FOO N) (const_int C)) and
8273 the high order N bits of FOO (N+1 if an inequality comparison)
8274 are not significant, we can do this by comparing FOO with C
8275 shifted right N bits so long as the low-order N bits of C are
8276 zero. */
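/* Illustration: (eq (ashift X 2) (const_int 12)) becomes
   (eq X (const_int 3)) when the top two bits of X are known
   to be zero.  */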
8277 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8278 && INTVAL (XEXP (op0, 1)) >= 0
8279 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
8280 < HOST_BITS_PER_WIDE_INT)
8281 && ((const_op
8282 & ((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1) == 0)
8283 && mode_width <= HOST_BITS_PER_WIDE_INT
8284 && (significant_bits (XEXP (op0, 0), mode)
8285 & ~ (mask >> (INTVAL (XEXP (op0, 1))
8286 + ! equality_comparison_p))) == 0)
8287 {
8288 const_op >>= INTVAL (XEXP (op0, 1));
8289 op1 = GEN_INT (const_op);
8290 op0 = XEXP (op0, 0);
8291 continue;
8292 }
8293
8294 /* If we are doing a sign bit comparison, it means we are testing
8295 a particular bit. Convert it to the appropriate AND. */
8296 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8297 && mode_width <= HOST_BITS_PER_WIDE_INT)
8298 {
8299 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8300 ((HOST_WIDE_INT) 1
8301 << (mode_width - 1
8302 - INTVAL (XEXP (op0, 1)))));
8303 code = (code == LT ? NE : EQ);
8304 continue;
8305 }
8306
8307 /* If this is an equality comparison with zero and we are shifting
8308 the low bit to the sign bit, we can convert this to an AND of the
8309 low-order bit. */
8310 if (const_op == 0 && equality_comparison_p
8311 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8312 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8313 {
8314 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8315 (HOST_WIDE_INT) 1);
8316 continue;
8317 }
8318 break;
8319
8320 case ASHIFTRT:
8321 /* If this is an equality comparison with zero, we can do this
8322 as a logical shift, which might be much simpler. */
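/* The replacement is safe because (ashiftrt X N) and (lshiftrt X N)
   are zero under the same condition: all bits of X above the low
   N bits are zero.  */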
8323 if (equality_comparison_p && const_op == 0
8324 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
8325 {
8326 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
8327 XEXP (op0, 0),
8328 INTVAL (XEXP (op0, 1)));
8329 continue;
8330 }
8331
8332 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
8333 do the comparison in a narrower mode. */
8334 if (! unsigned_comparison_p
8335 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8336 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8337 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
8338 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
8339 MODE_INT, 1)) != BLKmode
8340 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
8341 || ((unsigned HOST_WIDE_INT) - const_op
8342 <= GET_MODE_MASK (tmode))))
8343 {
8344 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
8345 continue;
8346 }
8347
8348 /* ... fall through ... */
8349 case LSHIFTRT:
8350 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
8351 the low order N bits of FOO are not significant, we can do this
8352 by comparing FOO with C shifted left N bits so long as no
8353 overflow occurs. */
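/* Illustration: (eq (lshiftrt X 2) (const_int 3)) becomes
   (eq X (const_int 12)) when the low two bits of X are known
   to be zero.  */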
8354 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8355 && INTVAL (XEXP (op0, 1)) >= 0
8356 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8357 && mode_width <= HOST_BITS_PER_WIDE_INT
8358 && (significant_bits (XEXP (op0, 0), mode)
8359 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
8360 && (const_op == 0
8361 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
8362 < mode_width)))
8363 {
8364 const_op <<= INTVAL (XEXP (op0, 1));
8365 op1 = GEN_INT (const_op);
8366 op0 = XEXP (op0, 0);
8367 continue;
8368 }
8369
8370 /* If we are using this shift to extract just the sign bit, we
8371 can replace this with an LT or GE comparison. */
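/* Illustration: with mode_width == 8, (ne (lshiftrt X 7)
   (const_int 0)) becomes (lt X (const_int 0)), since the shift
   leaves only the sign bit.  */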
8372 if (const_op == 0
8373 && (equality_comparison_p || sign_bit_comparison_p)
8374 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8375 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8376 {
8377 op0 = XEXP (op0, 0);
8378 code = (code == NE || code == GT ? LT : GE);
8379 continue;
8380 }
8381 break;
8382 }
8383
8384 break;
8385 }
8386
8387 /* Now make any compound operations involved in this comparison. Then,
8388 check for an outermost SUBREG on OP0 that isn't doing anything or is
8389 paradoxical. The latter case can only occur when it is known that the
8390 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
8391 We can never remove a SUBREG for a non-equality comparison because the
8392 sign bit is in a different place in the underlying object. */
8393
8394 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
8395 op1 = make_compound_operation (op1, SET);
8396
8397 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8398 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8399 && (code == NE || code == EQ)
8400 && ((GET_MODE_SIZE (GET_MODE (op0))
8401 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
8402 {
8403 op0 = SUBREG_REG (op0);
8404 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
8405 }
8406
8407 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8408 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8409 && (code == NE || code == EQ)
8410 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8411 && (significant_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
8412 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
8413 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
8414 op1),
8415 (significant_bits (tem, GET_MODE (SUBREG_REG (op0)))
8416 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
8417 op0 = SUBREG_REG (op0), op1 = tem;
8418
8419 /* We now do the opposite procedure: Some machines don't have compare
8420 insns in all modes. If OP0's mode is an integer mode smaller than a
8421 word and we can't do a compare in that mode, see if there is a larger
8422 mode for which we can do the compare. There are a number of cases in
8423 which we can use the wider mode. */
8424
8425 mode = GET_MODE (op0);
8426 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
8427 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
8428 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
8429 for (tmode = GET_MODE_WIDER_MODE (mode);
8430 (tmode != VOIDmode
8431 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
8432 tmode = GET_MODE_WIDER_MODE (tmode))
8433 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
8434 {
8435 /* If the only significant bits in OP0 and OP1 are those in the
8436 narrower mode and this is an equality or unsigned comparison,
8437 we can use the wider mode. Similarly for sign-extended
8438 values and equality or signed comparisons. */
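/* E.g., if only the low 8 bits of X and Y are significant in HImode,
   (eq X:QI Y:QI) can be done as an HImode comparison of the widened
   operands.  */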
8439 if (((code == EQ || code == NE
8440 || code == GEU || code == GTU || code == LEU || code == LTU)
8441 && ((significant_bits (op0, tmode) & ~ GET_MODE_MASK (mode))
8442 == 0)
8443 && ((significant_bits (op1, tmode) & ~ GET_MODE_MASK (mode))
8444 == 0))
8445 || ((code == EQ || code == NE
8446 || code == GE || code == GT || code == LE || code == LT)
8447 && (num_sign_bit_copies (op0, tmode)
8448 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
8449 && (num_sign_bit_copies (op1, tmode)
8450 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
8451 {
8452 op0 = gen_lowpart_for_combine (tmode, op0);
8453 op1 = gen_lowpart_for_combine (tmode, op1);
8454 break;
8455 }
8456
8457 /* If this is a test for negative, we can make an explicit
8458 test of the sign bit. */
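/* E.g., on a machine lacking a QImode compare but having an HImode
   one, (lt X:QI (const_int 0)) can become
   (ne (and:HI X' (const_int 128)) (const_int 0)), where X' is the
   HImode low-part extension of X.  */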
8459
8460 if (op1 == const0_rtx && (code == LT || code == GE)
8461 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8462 {
8463 op0 = gen_binary (AND, tmode,
8464 gen_lowpart_for_combine (tmode, op0),
8465 GEN_INT ((HOST_WIDE_INT) 1
8466 << (GET_MODE_BITSIZE (mode) - 1)));
8467 code = (code == LT) ? NE : EQ;
8468 break;
8469 }
8470 }
8471
8472 *pop0 = op0;
8473 *pop1 = op1;
8474
8475 return code;
8476 }
8477 \f
8478 /* Return 1 if we know that X, a comparison operation, is not operating
8479 on a floating-point value or is EQ or NE, meaning that we can safely
8480 reverse it. */
8481
8482 static int
8483 reversible_comparison_p (x)
8484 rtx x;
8485 {
8486 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
8487 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
8488 return 1;
8489
8490 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
8491 {
8492 case MODE_INT:
8493 return 1;
8494
8495 case MODE_CC:
8496 x = get_last_value (XEXP (x, 0));
8497 return (x && GET_CODE (x) == COMPARE
8498 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
8499 }
8500
8501 return 0;
8502 }
8503 \f
8504 /* Utility function for following routine. Called when X is part of a value
8505 being stored into reg_last_set_value. Sets reg_last_set_table_tick
8506 for each register mentioned. Similar to mention_regs in cse.c */
8507
8508 static void
8509 update_table_tick (x)
8510 rtx x;
8511 {
8512 register enum rtx_code code = GET_CODE (x);
8513 register char *fmt = GET_RTX_FORMAT (code);
8514 register int i;
8515
8516 if (code == REG)
8517 {
8518 int regno = REGNO (x);
8519 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8520 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8521
8522 for (i = regno; i < endregno; i++)
8523 reg_last_set_table_tick[i] = label_tick;
8524
8525 return;
8526 }
8527
8528 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8529 /* Note that we can't have an "E" in values stored; see
8530 get_last_value_validate. */
8531 if (fmt[i] == 'e')
8532 update_table_tick (XEXP (x, i));
8533 }
8534
8535 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
8536 are saying that the register is clobbered and we no longer know its
8537 value. If INSN is zero, don't update reg_last_set; this call is normally
8538 done with VALUE also zero to invalidate the register. */
8539
8540 static void
8541 record_value_for_reg (reg, insn, value)
8542 rtx reg;
8543 rtx insn;
8544 rtx value;
8545 {
8546 int regno = REGNO (reg);
8547 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8548 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
8549 int i;
8550
8551 /* If VALUE contains REG and we have a previous value for REG, substitute
8552 the previous value. */
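/* For instance, if reg 5 previously held (plus (reg 6) (const_int 1))
   and is now set to (plus (reg 5) (const_int 2)), we record
   (plus (plus (reg 6) (const_int 1)) (const_int 2)).  */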
8553 if (value && insn && reg_overlap_mentioned_p (reg, value))
8554 {
8555 rtx tem;
8556
8557 /* Set things up so get_last_value is allowed to see anything set up to
8558 our insn. */
8559 subst_low_cuid = INSN_CUID (insn);
8560 tem = get_last_value (reg);
8561
8562 if (tem)
8563 value = replace_rtx (copy_rtx (value), reg, tem);
8564 }
8565
8566 /* For each register modified, show we don't know its value, that
8567 its value has been updated, and that we don't know the location of
8568 the death of the register. */
8569 for (i = regno; i < endregno; i ++)
8570 {
8571 if (insn)
8572 reg_last_set[i] = insn;
8573 reg_last_set_value[i] = 0;
8574 reg_last_death[i] = 0;
8575 }
8576
8577 /* Mark registers that are being referenced in this value. */
8578 if (value)
8579 update_table_tick (value);
8580
8581 /* Now update the status of each register being set.
8582 If someone is using this register in this block, set this register
8583 to invalid since we will get confused between the two lives in this
8584 basic block. This makes using this register always invalid. In cse, we
8585 scan the table to invalidate all entries using this register, but this
8586 is too much work for us. */
8587
8588 for (i = regno; i < endregno; i++)
8589 {
8590 reg_last_set_label[i] = label_tick;
8591 if (value && reg_last_set_table_tick[i] == label_tick)
8592 reg_last_set_invalid[i] = 1;
8593 else
8594 reg_last_set_invalid[i] = 0;
8595 }
8596
8597 /* The value being assigned might refer to X (like in "x++;"). In that
8598 case, we must replace it with (clobber (const_int 0)) to prevent
8599 infinite loops. */
8600 if (value && ! get_last_value_validate (&value,
8601 reg_last_set_label[regno], 0))
8602 {
8603 value = copy_rtx (value);
8604 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
8605 value = 0;
8606 }
8607
8608 /* For the main register being modified, update the value. */
8609 reg_last_set_value[regno] = value;
8610
8611 }
8612
8613 /* Used for communication between the following two routines. */
8614 static rtx record_dead_insn;
8615
8616 /* Called via note_stores from record_dead_and_set_regs to handle one
8617 SET or CLOBBER in an insn. */
8618
8619 static void
8620 record_dead_and_set_regs_1 (dest, setter)
8621 rtx dest, setter;
8622 {
8623 if (GET_CODE (dest) == REG)
8624 {
8625 /* If we are setting the whole register, we know its value. Otherwise
8626 show that we don't know the value. We can handle SUBREG in
8627 some cases. */
8628 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
8629 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
8630 else if (GET_CODE (setter) == SET
8631 && GET_CODE (SET_DEST (setter)) == SUBREG
8632 && SUBREG_REG (SET_DEST (setter)) == dest
8633 && subreg_lowpart_p (SET_DEST (setter)))
8634 record_value_for_reg (dest, record_dead_insn,
8635 gen_lowpart_for_combine (GET_MODE (dest),
8636 SET_SRC (setter)));
8637 else
8638 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
8639 }
8640 else if (GET_CODE (dest) == MEM
8641 /* Ignore pushes, they clobber nothing. */
8642 && ! push_operand (dest, GET_MODE (dest)))
8643 mem_last_set = INSN_CUID (record_dead_insn);
8644 }
8645
8646 /* Update the records of when each REG was most recently set or killed
8647 for the things done by INSN. This is the last thing done in processing
8648 INSN in the combiner loop.
8649
8650 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
8651 similar information mem_last_set (which insn most recently modified memory)
8652 and last_call_cuid (which insn was the most recent subroutine call). */
8653
8654 static void
8655 record_dead_and_set_regs (insn)
8656 rtx insn;
8657 {
8658 register rtx link;
8659 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
8660 {
8661 if (REG_NOTE_KIND (link) == REG_DEAD)
8662 reg_last_death[REGNO (XEXP (link, 0))] = insn;
8663 else if (REG_NOTE_KIND (link) == REG_INC)
8664 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
8665 }
8666
8667 if (GET_CODE (insn) == CALL_INSN)
8668 last_call_cuid = mem_last_set = INSN_CUID (insn);
8669
8670 record_dead_insn = insn;
8671 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
8672 }
8673 \f
8674 /* Utility routine for the following function. Verify that all the registers
8675 mentioned in *LOC are valid when *LOC was part of a value set when
8676 label_tick == TICK. Return 0 if some are not.
8677
8678 If REPLACE is non-zero, replace the invalid reference with
8679 (clobber (const_int 0)) and return 1. This replacement is useful because
8680 we often can get useful information about the form of a value (e.g., if
8681 it was produced by a shift that always produces -1 or 0) even though
8682 we don't know exactly what registers it was produced from. */
8683
8684 static int
8685 get_last_value_validate (loc, tick, replace)
8686 rtx *loc;
8687 int tick;
8688 int replace;
8689 {
8690 rtx x = *loc;
8691 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
8692 int len = GET_RTX_LENGTH (GET_CODE (x));
8693 int i;
8694
8695 if (GET_CODE (x) == REG)
8696 {
8697 int regno = REGNO (x);
8698 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8699 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8700 int j;
8701
8702 for (j = regno; j < endregno; j++)
8703 if (reg_last_set_invalid[j]
8704 /* If this is a pseudo-register that was only set once, it is
8705 always valid. */
8706 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
8707 && reg_last_set_label[j] > tick))
8708 {
8709 if (replace)
8710 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8711 return replace;
8712 }
8713
8714 return 1;
8715 }
8716
8717 for (i = 0; i < len; i++)
8718 if ((fmt[i] == 'e'
8719 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
8720 /* Don't bother with these. They shouldn't occur anyway. */
8721 || fmt[i] == 'E')
8722 return 0;
8723
8724 /* If we haven't found a reason for it to be invalid, it is valid. */
8725 return 1;
8726 }
8727
8728 /* Get the last value assigned to X, if known. Some registers
8729 in the value may be replaced with (clobber (const_int 0)) if their value
8730 is no longer known reliably. */
8731
8732 static rtx
8733 get_last_value (x)
8734 rtx x;
8735 {
8736 int regno;
8737 rtx value;
8738
8739 /* If this is a non-paradoxical SUBREG, get the value of its operand and
8740 then convert it to the desired mode. If this is a paradoxical SUBREG,
8741 we cannot predict what values the "extra" bits might have. */
8742 if (GET_CODE (x) == SUBREG
8743 && subreg_lowpart_p (x)
8744 && (GET_MODE_SIZE (GET_MODE (x))
8745 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8746 && (value = get_last_value (SUBREG_REG (x))) != 0)
8747 return gen_lowpart_for_combine (GET_MODE (x), value);
8748
8749 if (GET_CODE (x) != REG)
8750 return 0;
8751
8752 regno = REGNO (x);
8753 value = reg_last_set_value[regno];
8754
8755 /* If we don't have a value or if it isn't for this basic block, return 0. */
8756
8757 if (value == 0
8758 || (reg_n_sets[regno] != 1
8759 && (reg_last_set_label[regno] != label_tick)))
8760 return 0;
8761
8762 /* If the value was set in a later insn than the ones we are processing,
8763 we can't use it, but make a quick check to see if the previous insn
8764 set it to something. This is commonly the case when the same pseudo
8765 is used by repeated insns. */
8766
8767 if (reg_n_sets[regno] != 1
8768 && INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
8769 {
8770 rtx insn, set;
8771
8772 for (insn = prev_nonnote_insn (subst_insn);
8773 insn && INSN_CUID (insn) >= subst_low_cuid;
8774 insn = prev_nonnote_insn (insn))
8775 ;
8776
8777 if (insn
8778 && (set = single_set (insn)) != 0
8779 && rtx_equal_p (SET_DEST (set), x))
8780 {
8781 value = SET_SRC (set);
8782
8783 /* Make sure that VALUE doesn't reference X. Replace any
8784 explicit references with a CLOBBER. If there are any remaining
8785 references (rare), don't use the value. */
8786
8787 if (reg_mentioned_p (x, value))
8788 value = replace_rtx (copy_rtx (value), x,
8789 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
8790
8791 if (reg_overlap_mentioned_p (x, value))
8792 return 0;
8793 }
8794 else
8795 return 0;
8796 }
8797
8798 /* If the value has all its registers valid, return it. */
8799 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
8800 return value;
8801
8802 /* Otherwise, make a copy and replace any invalid register with
8803 (clobber (const_int 0)). If that fails for some reason, return 0. */
8804
8805 value = copy_rtx (value);
8806 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
8807 return value;
8808
8809 return 0;
8810 }
8811 \f
8812 /* Return nonzero if expression X refers to a REG or to memory
8813 that is set in an instruction more recent than FROM_CUID. */
8814
8815 static int
8816 use_crosses_set_p (x, from_cuid)
8817 register rtx x;
8818 int from_cuid;
8819 {
8820 register char *fmt;
8821 register int i;
8822 register enum rtx_code code = GET_CODE (x);
8823
8824 if (code == REG)
8825 {
8826 register int regno = REGNO (x);
8827 #ifdef PUSH_ROUNDING
8828 /* Don't allow uses of the stack pointer to be moved,
8829 because we don't know whether the move crosses a push insn. */
8830 if (regno == STACK_POINTER_REGNUM)
8831 return 1;
8832 #endif
8833 return (reg_last_set[regno]
8834 && INSN_CUID (reg_last_set[regno]) > from_cuid);
8835 }
8836
8837 if (code == MEM && mem_last_set > from_cuid)
8838 return 1;
8839
8840 fmt = GET_RTX_FORMAT (code);
8841
8842 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8843 {
8844 if (fmt[i] == 'E')
8845 {
8846 register int j;
8847 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8848 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
8849 return 1;
8850 }
8851 else if (fmt[i] == 'e'
8852 && use_crosses_set_p (XEXP (x, i), from_cuid))
8853 return 1;
8854 }
8855 return 0;
8856 }
8857 \f
8858 /* Define three variables used for communication between the following
8859 routines. */
8860
8861 static int reg_dead_regno, reg_dead_endregno;
8862 static int reg_dead_flag;
8863
8864 /* Function called via note_stores from reg_dead_at_p.
8865
8866 If DEST is within [reg_dead_regno, reg_dead_endregno), set
8867 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
8868
8869 static void
8870 reg_dead_at_p_1 (dest, x)
8871 rtx dest;
8872 rtx x;
8873 {
8874 int regno, endregno;
8875
8876 if (GET_CODE (dest) != REG)
8877 return;
8878
8879 regno = REGNO (dest);
8880 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8881 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
8882
8883 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
8884 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
8885 }
8886
8887 /* Return non-zero if REG is known to be dead at INSN.
8888
8889 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
8890 referencing REG, it is dead. If we hit a SET referencing REG, it is
8891 live. Otherwise, see if it is live or dead at the start of the basic
8892 block we are in. */
8893
8894 static int
8895 reg_dead_at_p (reg, insn)
8896 rtx reg;
8897 rtx insn;
8898 {
8899 int block, i;
8900
8901 /* Set variables for reg_dead_at_p_1. */
8902 reg_dead_regno = REGNO (reg);
8903 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
8904 ? HARD_REGNO_NREGS (reg_dead_regno,
8905 GET_MODE (reg))
8906 : 1);
8907
8908 reg_dead_flag = 0;
8909
8910 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
8911 beginning of function. */
8912 for (; insn && GET_CODE (insn) != CODE_LABEL;
8913 insn = prev_nonnote_insn (insn))
8914 {
8915 note_stores (PATTERN (insn), reg_dead_at_p_1);
8916 if (reg_dead_flag)
8917 return reg_dead_flag == 1 ? 1 : 0;
8918
8919 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
8920 return 1;
8921 }
8922
8923 /* Get the basic block number that we were in. */
8924 if (insn == 0)
8925 block = 0;
8926 else
8927 {
8928 for (block = 0; block < n_basic_blocks; block++)
8929 if (insn == basic_block_head[block])
8930 break;
8931
8932 if (block == n_basic_blocks)
8933 return 0;
8934 }
8935
8936 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
8937 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
8938 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
8939 return 0;
8940
8941 return 1;
8942 }
8943 \f
8944 /* Remove register number REGNO from the dead registers list of INSN.
8945
8946 Return the note used to record the death, if there was one. */
8947
8948 rtx
8949 remove_death (regno, insn)
8950 int regno;
8951 rtx insn;
8952 {
8953 register rtx note = find_regno_note (insn, REG_DEAD, regno);
8954
8955 if (note)
8956 {
8957 reg_n_deaths[regno]--;
8958 remove_note (insn, note);
8959 }
8960
8961 return note;
8962 }
8963
8964 /* For each register (hardware or pseudo) used within expression X, if its
8965 death is in an instruction with cuid between FROM_CUID (inclusive) and
8966 TO_INSN (exclusive), put a REG_DEAD note for that register in the
8967 list headed by PNOTES.
8968
8969 This is done when X is being merged by combination into TO_INSN. These
8970 notes will then be distributed as needed. */
8971
8972 static void
8973 move_deaths (x, from_cuid, to_insn, pnotes)
8974 rtx x;
8975 int from_cuid;
8976 rtx to_insn;
8977 rtx *pnotes;
8978 {
8979 register char *fmt;
8980 register int len, i;
8981 register enum rtx_code code = GET_CODE (x);
8982
8983 if (code == REG)
8984 {
8985 register int regno = REGNO (x);
8986 register rtx where_dead = reg_last_death[regno];
8987
8988 if (where_dead && INSN_CUID (where_dead) >= from_cuid
8989 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
8990 {
8991 rtx note = remove_death (regno, reg_last_death[regno]);
8992
8993 /* It is possible for the call above to return 0. This can occur
8994 when reg_last_death points to I2 or I1 that we combined with.
8995 In that case make a new note. */
8996
8997 if (note)
8998 {
8999 XEXP (note, 1) = *pnotes;
9000 *pnotes = note;
9001 }
9002 else
9003 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
9004
9005 reg_n_deaths[regno]++;
9006 }
9007
9008 return;
9009 }
9010
9011 else if (GET_CODE (x) == SET)
9012 {
9013 rtx dest = SET_DEST (x);
9014
9015 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
9016
9017 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
9018 that accesses one word of a multi-word item, some
9019 piece of every register in the expression is used by
9020 this insn, so remove any old death. */
9021
9022 if (GET_CODE (dest) == ZERO_EXTRACT
9023 || GET_CODE (dest) == STRICT_LOW_PART
9024 || (GET_CODE (dest) == SUBREG
9025 && (((GET_MODE_SIZE (GET_MODE (dest))
9026 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
9027 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
9028 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
9029 {
9030 move_deaths (dest, from_cuid, to_insn, pnotes);
9031 return;
9032 }
9033
9034 /* If this is some other SUBREG, we know it replaces the entire
9035 value, so use that as the destination. */
9036 if (GET_CODE (dest) == SUBREG)
9037 dest = SUBREG_REG (dest);
9038
9039 /* If this is a MEM, adjust deaths of anything used in the address.
9040 For a REG (the only other possibility), the entire value is
9041 being replaced so the old value is not used in this insn. */
9042
9043 if (GET_CODE (dest) == MEM)
9044 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
9045 return;
9046 }
9047
9048 else if (GET_CODE (x) == CLOBBER)
9049 return;
9050
9051 len = GET_RTX_LENGTH (code);
9052 fmt = GET_RTX_FORMAT (code);
9053
9054 for (i = 0; i < len; i++)
9055 {
9056 if (fmt[i] == 'E')
9057 {
9058 register int j;
9059 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9060 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
9061 }
9062 else if (fmt[i] == 'e')
9063 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
9064 }
9065 }
9066 \f
9067 /* Return 1 if X is the target of a bit-field assignment in BODY, the
9068 pattern of an insn. X must be a REG. */
9069
9070 static int
9071 reg_bitfield_target_p (x, body)
9072 rtx x;
9073 rtx body;
9074 {
9075 int i;
9076
9077 if (GET_CODE (body) == SET)
9078 {
9079 rtx dest = SET_DEST (body);
9080 rtx target;
9081 int regno, tregno, endregno, endtregno;
9082
9083 if (GET_CODE (dest) == ZERO_EXTRACT)
9084 target = XEXP (dest, 0);
9085 else if (GET_CODE (dest) == STRICT_LOW_PART)
9086 target = SUBREG_REG (XEXP (dest, 0));
9087 else
9088 return 0;
9089
9090 if (GET_CODE (target) == SUBREG)
9091 target = SUBREG_REG (target);
9092
9093 if (GET_CODE (target) != REG)
9094 return 0;
9095
9096 tregno = REGNO (target), regno = REGNO (x);
9097 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
9098 return target == x;
9099
9100 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
9101 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9102
9103 return endregno > tregno && regno < endtregno;
9104 }
9105
9106 else if (GET_CODE (body) == PARALLEL)
9107 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
9108 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
9109 return 1;
9110
9111 return 0;
9112 }
9113 \f
9114 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
9115 as appropriate. I3 and I2 are the insns resulting from combining
9116 insns including FROM (I2 may be zero).
9117
9118 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
9119 not need REG_DEAD notes because they are being substituted for. This
9120 saves searching in the most common cases.
9121
9122 Each note in the list is either ignored or placed on some insns, depending
9123 on the type of note. */
9124
9125 static void
9126 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
9127 rtx notes;
9128 rtx from_insn;
9129 rtx i3, i2;
9130 rtx elim_i2, elim_i1;
9131 {
9132 rtx note, next_note;
9133 rtx tem;
9134
9135 for (note = notes; note; note = next_note)
9136 {
9137 rtx place = 0, place2 = 0;
9138
9139 /* If this NOTE references a pseudo register, ensure it references
9140 the latest copy of that register. */
9141 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
9142 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
9143 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
9144
9145 next_note = XEXP (note, 1);
9146 switch (REG_NOTE_KIND (note))
9147 {
9148 case REG_UNUSED:
9149 /* If this register is set or clobbered in I3, put the note there
9150 unless there is one already. */
9151 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
9152 {
9153 if (! (GET_CODE (XEXP (note, 0)) == REG
9154 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
9155 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
9156 place = i3;
9157 }
9158 /* Otherwise, if this register is used by I3, then this register
9159 now dies here, so we must put a REG_DEAD note here unless there
9160 is one already. */
9161 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
9162 && ! (GET_CODE (XEXP (note, 0)) == REG
9163 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
9164 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
9165 {
9166 PUT_REG_NOTE_KIND (note, REG_DEAD);
9167 place = i3;
9168 }
9169 break;
9170
9171 case REG_EQUAL:
9172 case REG_EQUIV:
9173 case REG_NONNEG:
9174 /* These notes say something about results of an insn. We can
9175 only support them if they used to be on I3 in which case they
9176 remain on I3. Otherwise they are ignored.
9177
9178 If the note refers to an expression that is not a constant, we
9179 must also ignore the note since we cannot tell whether the
9180 equivalence is still true. It might be possible to do
9181 slightly better than this (we only have a problem if I2DEST
9182 or I1DEST is present in the expression), but it doesn't
9183 seem worth the trouble. */
9184
9185 if (from_insn == i3
9186 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
9187 place = i3;
9188 break;
9189
9190 case REG_INC:
9191 case REG_NO_CONFLICT:
9192 case REG_LABEL:
9193 /* These notes say something about how a register is used. They must
9194 be present on any use of the register in I2 or I3. */
9195 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
9196 place = i3;
9197
9198 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
9199 {
9200 if (place)
9201 place2 = i2;
9202 else
9203 place = i2;
9204 }
9205 break;
9206
9207 case REG_WAS_0:
9208 /* It is too much trouble to try to see if this note is still
9209 correct in all situations. It is better to simply delete it. */
9210 break;
9211
9212 case REG_RETVAL:
9213 /* If the insn previously containing this note still exists,
9214 put it back where it was. Otherwise move it to the previous
9215 insn. Adjust the corresponding REG_LIBCALL note. */
9216 if (GET_CODE (from_insn) != NOTE)
9217 place = from_insn;
9218 else
9219 {
9220 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
9221 place = prev_real_insn (from_insn);
9222 if (tem && place)
9223 XEXP (tem, 0) = place;
9224 }
9225 break;
9226
9227 case REG_LIBCALL:
9228 /* This is handled similarly to REG_RETVAL. */
9229 if (GET_CODE (from_insn) != NOTE)
9230 place = from_insn;
9231 else
9232 {
9233 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
9234 place = next_real_insn (from_insn);
9235 if (tem && place)
9236 XEXP (tem, 0) = place;
9237 }
9238 break;
9239
9240 case REG_DEAD:
9241 /* If the register is used as an input in I3, it dies there.
9242 Similarly for I2, if it is non-zero and adjacent to I3.
9243
9244 If the register is not used as an input in either I3 or I2
9245 and it is not one of the registers we were supposed to eliminate,
9246 there are two possibilities. We might have a non-adjacent I2
9247 or we might have somehow eliminated an additional register
9248 from a computation. For example, we might have had A & B where
9249 we discover that B will always be zero. In this case we will
9250 eliminate the reference to A.
9251
9252 In both cases, we must search to see if we can find a previous
9253 use of A and put the death note there. */
9254
9255 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
9256 place = i3;
9257 else if (i2 != 0 && next_nonnote_insn (i2) == i3
9258 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9259 place = i2;
9260
9261 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
9262 break;
9263
9264 /* If the register is used in both I2 and I3 and it dies in I3,
9265 we might have added another reference to it. If reg_n_refs
9266 was 2, bump it to 3. This has to be correct since the
9267 register must have been set somewhere. The reason this is
9268 done is because local-alloc.c treats 2 references as a
9269 special case. */
9270
9271 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
9272 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
9273 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9274 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
9275
9276 if (place == 0)
9277 for (tem = prev_nonnote_insn (i3);
9278 tem && (GET_CODE (tem) == INSN
9279 || GET_CODE (tem) == CALL_INSN);
9280 tem = prev_nonnote_insn (tem))
9281 {
9282 /* If the register is being set at TEM, see if that is all
9283 TEM is doing. If so, delete TEM. Otherwise, make this
9284 into a REG_UNUSED note instead. */
9285 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
9286 {
9287 rtx set = single_set (tem);
9288
9289 /* Verify that it was the set, and not a clobber that
9290 modified the register. */
9291
9292 if (set != 0 && ! side_effects_p (SET_SRC (set))
9293 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
9294 {
9295 /* Move the notes and links of TEM elsewhere.
9296 This might delete other dead insns recursively.
9297 First set the pattern to something that won't use
9298 any register. */
9299
9300 PATTERN (tem) = pc_rtx;
9301
9302 distribute_notes (REG_NOTES (tem), tem, tem,
9303 NULL_RTX, NULL_RTX, NULL_RTX);
9304 distribute_links (LOG_LINKS (tem));
9305
9306 PUT_CODE (tem, NOTE);
9307 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
9308 NOTE_SOURCE_FILE (tem) = 0;
9309 }
9310 else
9311 {
9312 PUT_REG_NOTE_KIND (note, REG_UNUSED);
9313
9314 /* If there isn't already a REG_UNUSED note, put one
9315 here. */
9316 if (! find_regno_note (tem, REG_UNUSED,
9317 REGNO (XEXP (note, 0))))
9318 place = tem;
9319 break;
9320 }
9321 }
9322 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
9323 {
9324 place = tem;
9325 break;
9326 }
9327 }
9328
9329 /* If the register is set or already dead at PLACE, we needn't do
9330 anything with this note if it is still a REG_DEAD note.
9331
9332 Note that we cannot use just `dead_or_set_p' here since we can
9333 convert an assignment to a register into a bit-field assignment.
9334 Therefore, we must also omit the note if the register is the
9335 target of a bitfield assignment. */
9336
9337 if (place && REG_NOTE_KIND (note) == REG_DEAD)
9338 {
9339 int regno = REGNO (XEXP (note, 0));
9340
9341 if (dead_or_set_p (place, XEXP (note, 0))
9342 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
9343 {
9344 /* Unless the register previously died in PLACE, clear
9345 reg_last_death. [I no longer understand why this is
9346 being done.] */
9347 if (reg_last_death[regno] != place)
9348 reg_last_death[regno] = 0;
9349 place = 0;
9350 }
9351 else
9352 reg_last_death[regno] = place;
9353
9354 /* If this is a death note for a hard reg that is occupying
9355 multiple registers, ensure that we are still using all
9356 parts of the object. If we find a piece of the object
9357 that is unused, we must add a USE for that piece before
9358 PLACE and put the appropriate REG_DEAD note on it.
9359
9360 An alternative would be to put a REG_UNUSED for the pieces
9361 on the insn that set the register, but that can't be done if
9362 it is not in the same block. It is simpler, though less
9363 efficient, to add the USE insns. */
9364
9365 if (place && regno < FIRST_PSEUDO_REGISTER
9366 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
9367 {
9368 int endregno
9369 = regno + HARD_REGNO_NREGS (regno,
9370 GET_MODE (XEXP (note, 0)));
9371 int all_used = 1;
9372 int i;
9373
9374 for (i = regno; i < endregno; i++)
9375 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
9376 {
9377 rtx piece = gen_rtx (REG, word_mode, i);
9378 rtx p;
9379
9380 /* See if we already placed a USE note for this
9381 register in front of PLACE. */
9382 for (p = place;
9383 GET_CODE (PREV_INSN (p)) == INSN
9384 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
9385 p = PREV_INSN (p))
9386 if (rtx_equal_p (piece,
9387 XEXP (PATTERN (PREV_INSN (p)), 0)))
9388 {
9389 p = 0;
9390 break;
9391 }
9392
9393 if (p)
9394 {
9395 rtx use_insn
9396 = emit_insn_before (gen_rtx (USE, VOIDmode,
9397 piece),
9398 p);
9399 REG_NOTES (use_insn)
9400 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
9401 REG_NOTES (use_insn));
9402 }
9403
9404 all_used = 0;
9405 }
9406
9407 if (! all_used)
9408 {
9409 /* Put only REG_DEAD notes for pieces that are
9410 still used and that are not already dead or set. */
9411
9412 for (i = regno; i < endregno; i++)
9413 {
9414 rtx piece = gen_rtx (REG, word_mode, i);
9415
9416 if (reg_referenced_p (piece, PATTERN (place))
9417 && ! dead_or_set_p (place, piece)
9418 && ! reg_bitfield_target_p (piece,
9419 PATTERN (place)))
9420 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
9421 piece,
9422 REG_NOTES (place));
9423 }
9424
9425 place = 0;
9426 }
9427 }
9428 }
9429 break;
9430
9431 default:
9432 /* Any other notes should not be present at this point in the
9433 compilation. */
9434 abort ();
9435 }
9436
9437 if (place)
9438 {
9439 XEXP (note, 1) = REG_NOTES (place);
9440 REG_NOTES (place) = note;
9441 }
9442 else if ((REG_NOTE_KIND (note) == REG_DEAD
9443 || REG_NOTE_KIND (note) == REG_UNUSED)
9444 && GET_CODE (XEXP (note, 0)) == REG)
9445 reg_n_deaths[REGNO (XEXP (note, 0))]--;
9446
9447 if (place2)
9448 {
9449 if ((REG_NOTE_KIND (note) == REG_DEAD
9450 || REG_NOTE_KIND (note) == REG_UNUSED)
9451 && GET_CODE (XEXP (note, 0)) == REG)
9452 reg_n_deaths[REGNO (XEXP (note, 0))]++;
9453
9454 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
9455 XEXP (note, 0), REG_NOTES (place2));
9456 }
9457 }
9458 }
9459 \f
9460 /* Similarly to above, distribute the LOG_LINKS that used to be present on
9461 I3, I2, and I1 to new locations. This is also called in one case to
9462 add a link pointing at I3 when I3's destination is changed. */
9463
9464 static void
9465 distribute_links (links)
9466 rtx links;
9467 {
9468 rtx link, next_link;
9469
9470 for (link = links; link; link = next_link)
9471 {
9472 rtx place = 0;
9473 rtx insn;
9474 rtx set, reg;
9475
9476 next_link = XEXP (link, 1);
9477
9478 /* If the insn that this link points to is a NOTE or isn't a single
9479 set, ignore it. In the latter case, it isn't clear what we
9480 can do other than ignore the link, since we can't tell which
9481 register it was for. Such links wouldn't be used by combine
9482 anyway.
9483
9484 It is not possible for the destination of the target of the link to
9485 have been changed by combine. The only way that could happen is if we
9486 replace I3, I2, and I1 by I3 and I2. But in that case the
9487 destination of I2 also remains unchanged. */
9488
9489 if (GET_CODE (XEXP (link, 0)) == NOTE
9490 || (set = single_set (XEXP (link, 0))) == 0)
9491 continue;
9492
9493 reg = SET_DEST (set);
9494 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
9495 || GET_CODE (reg) == SIGN_EXTRACT
9496 || GET_CODE (reg) == STRICT_LOW_PART)
9497 reg = XEXP (reg, 0);
9498
9499 /* A LOG_LINK is defined as being placed on the first insn that uses
9500 a register and points to the insn that sets the register. Start
9501 searching at the next insn after the target of the link and stop
9502 when we reach a set of the register or the end of the basic block.
9503
9504 Note that this correctly handles the link that used to point from
9505 I3 to I2. Also note that not much searching is typically done here
9506 since most links don't point very far away. */
9507
9508 for (insn = NEXT_INSN (XEXP (link, 0));
9509 (insn && GET_CODE (insn) != CODE_LABEL
9510 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
9511 insn = NEXT_INSN (insn))
9512 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
9513 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
9514 {
9515 if (reg_referenced_p (reg, PATTERN (insn)))
9516 place = insn;
9517 break;
9518 }
9519
9520 /* If we found a place to put the link, place it there unless there
9521 is already a link to the same insn as LINK at that point. */
9522
9523 if (place)
9524 {
9525 rtx link2;
9526
9527 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
9528 if (XEXP (link2, 0) == XEXP (link, 0))
9529 break;
9530
9531 if (link2 == 0)
9532 {
9533 XEXP (link, 1) = LOG_LINKS (place);
9534 LOG_LINKS (place) = link;
9535 }
9536 }
9537 }
9538 }
9539 \f
9540 void
9541 dump_combine_stats (file)
9542 FILE *file;
9543 {
9544 fprintf
9545 (file,
9546 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
9547 combine_attempts, combine_merges, combine_extras, combine_successes);
9548 }
9549
9550 void
9551 dump_combine_total_stats (file)
9552 FILE *file;
9553 {
9554 fprintf
9555 (file,
9556 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
9557 total_attempts, total_merges, total_extras, total_successes);
9558 }