/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0.  None are needed,
   because the insn that sets CC0 is always immediately before the
   insn that tests it.  So we always regard a branch insn as having
   a logical link to the preceding insn.  The same is true for an
   insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

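/* An illustrative sketch (not part of the original sources; register
   numbers are hypothetical): a typical two-insn combination folds
   address arithmetic into a memory reference.  Given LOG_LINKed insns

	(set (reg 60) (plus (reg 58) (const_int 4)))
	(set (mem (reg 60)) (reg 59))

   substituting the first SET's source into the second yields

	(set (mem (plus (reg 58) (const_int 4))) (reg 59))

   which replaces both insns if the machine description recognizes it.  */
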
#include "config.h"
#include "gvarargs.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include <stdio.h>

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* If byte loads either zero- or sign-extend, define BYTE_LOADS_EXTEND
   for cases when we don't care which is true.  Define LOAD_EXTEND to
   be ZERO_EXTEND or SIGN_EXTEND, depending on which was defined.  */

#ifdef BYTE_LOADS_ZERO_EXTEND
#define BYTE_LOADS_EXTEND
#define LOAD_EXTEND ZERO_EXTEND
#endif

#ifdef BYTE_LOADS_SIGN_EXTEND
#define BYTE_LOADS_EXTEND
#define LOAD_EXTEND SIGN_EXTEND
#endif

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

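/* An illustrative sketch of the validity rules above (not from the
   original sources; register numbers are hypothetical): if pseudo 65 is
   set exactly once in the function (reg_n_sets[65] == 1), its entry in
   reg_last_set_value stays usable across labels.  If instead it is set
   within the current basic block, reg_last_set_label[65] == label_tick
   and the value is usable only until the next CODE_LABEL bumps
   label_tick, after which entry 65 is treated as unknown.  */
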
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  Keeping
   it zero during the computation prevents propagating values based on
   previously set values, which can be incorrect if a variable is modified
   in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx rtx; int i;} old_contents;
  union {rtx *rtx; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);					\
      if (undobuf.num_undo < MAX_UNDO)				\
	{							\
	  undobuf.undo[undobuf.num_undo].is_int = 0;		\
	  undobuf.undo[undobuf.num_undo].where.rtx = &INTO;	\
	  undobuf.undo[undobuf.num_undo].old_contents.rtx = INTO; \
	  INTO = _new;						\
	  if (undobuf.undo[undobuf.num_undo].old_contents.rtx != INTO) \
	    undobuf.num_undo++;					\
	}							\
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)				\
	{							\
	  undobuf.undo[undobuf.num_undo].is_int = 1;		\
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO;	\
	  INTO = NEWVAL;					\
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
	    undobuf.num_undo++;					\
	}							\
    } while (0)

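/* A minimal usage sketch for SUBST (illustrative; not from the original
   sources, and newpat/notes stand for caller locals).  A caller rewrites
   a subexpression in place and backs the change out if the result fails
   to match an insn pattern:

	SUBST (XEXP (x, 0), new_operand);
	if (recog_for_combine (&newpat, insn, &notes) < 0)
	  undo_all ();

   Each SUBST records the old contents in undobuf, so undo_all can
   restore every location modified since num_undo was last reset.  */
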
/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void set_nonzero_bits_and_sign_copies ();
static void setup_incoming_promotions ();
static void move_deaths ();
rtx remove_death ();
static void record_value_for_reg ();
static void record_dead_and_set_regs ();
static int use_crosses_set_p ();
static rtx try_combine ();
static rtx *find_split_point ();
static rtx subst ();
static void undo_all ();
static int reg_dead_at_p ();
static rtx expand_compound_operation ();
static rtx expand_field_assignment ();
static rtx make_extraction ();
static int get_pos_from_mask ();
static rtx force_to_mode ();
static rtx known_cond ();
static rtx make_field_assignment ();
static rtx make_compound_operation ();
static rtx apply_distributive_law ();
static rtx simplify_and_const_int ();
static unsigned HOST_WIDE_INT nonzero_bits ();
static int num_sign_bit_copies ();
static int merge_outer_ops ();
static rtx simplify_shift_const ();
static int recog_for_combine ();
static rtx gen_lowpart_for_combine ();
static rtx gen_rtx_combine ();
static rtx gen_binary ();
static rtx gen_unary ();
static enum rtx_code simplify_comparison ();
static int reversible_comparison_p ();
static int get_last_value_validate ();
static rtx get_last_value ();
static void distribute_notes ();
static void distribute_links ();
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (int));
  bzero (reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero (reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero (reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
  bzero (reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also record any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (int));
  bzero (reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));

  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_CODE (insn) == INSN
	       || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
\f
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are known zero.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	return;

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (SET_SRC (set), nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
\f
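/* An illustrative consequence of the recording above (not from the
   original sources): if every SET of an SImode pseudo has a source like
   (zero_extend:SI (mem:QI ...)), each call ORs 0xff into the pseudo's
   reg_nonzero_bits entry, so the pass later knows that an
   (and:SI (reg ...) (const_int 255)) applied to that pseudo is
   redundant, even though the register is set in several basic blocks.  */
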
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't install a subreg involving two modes not tieable.
	 It can worsen register allocation, and can even make invalid reload
	 insns, since the reg inside may need to be copied from in the
	 outside mode, and that may be invalid if it is an fp reg copied in
	 integer mode.  As a special exception, we can allow this if
	 I3 is simply copying DEST, a REG, to CC0.  */
      || (GET_CODE (src) == SUBREG
	  && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
#ifdef HAVE_cc0
	  && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
		&& SET_DEST (PATTERN (i3)) == cc0_rtx
		&& GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
#endif
	  )
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  Also, don't move a volatile asm or UNSPEC_VOLATILE across
	 any other insns.  */
      || (! all_adjacent
	  && (use_crosses_set_p (src, INSN_CUID (insn))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
#ifdef SMALL_REGISTER_CLASSES
	      /* Don't extend the life of a hard register.  */
	      || REGNO (src) < FIRST_PSEUDO_REGISTER
#else
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
#endif
	      ))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
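/* Illustrative examples of the tests above (not from the original
   sources; register numbers are hypothetical): with an intervening
   CALL_INSN, combining (set (reg 70) (mem (reg 71))) into a later user
   is rejected because INSN_CUID (insn) < last_call_cuid, while
   (set (reg 70) (const_int 5)) may still cross the call since
   CONSTANT_P (src) holds for it.  */
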
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We check that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
	      && GET_CODE (src) != CALL
#else
	      && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
				       GET_MODE (inner_dest))
#endif
	      )

	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3)))
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   If we created two insns, return I2; otherwise return I3.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains the destination of I3 if it is used in I3's source, which means
     that the old life of that register is being killed.  If that usage is
     placed into I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  /* First check for one important special case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	  (parallel [(set (mem (reg 69)) ...)
		     (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
			XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	    || (i1 != 0
		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
      {
	undo_all ();
	return 0;
      }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
      if (i1)
	{
	  subst_low_cuid = INSN_CUID (i1);
	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	}
      else
	{
	  subst_low_cuid = INSN_CUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	}

      previous_num_undos = undobuf.num_undo;
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx *cc_use;
      enum machine_mode compare_mode;

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx (REG, compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[regno] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_combine (COMPARE, compare_mode,
				      i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      previous_num_undos = undobuf.num_undo;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

1465 /* If we already got a failure, don't try to do more. Otherwise,
1466 try to substitute in I1 if we have it. */
1467
1468 if (i1 && GET_CODE (newpat) != CLOBBER)
1469 {
1470 /* Before we can do this substitution, we must redo the test done
1471 above (see detailed comments there) that ensures that I1DEST
1472 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1473
5f4f0e22
CH
1474 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1475 0, NULL_PTR))
230d793d
RS
1476 {
1477 undo_all ();
1478 return 0;
1479 }
1480
1481 n_occurrences = 0;
d0ab8cd3 1482 subst_low_cuid = INSN_CUID (i1);
230d793d
RS
1483 newpat = subst (newpat, i1dest, i1src, 0, 0);
1484 previous_num_undos = undobuf.num_undo;
1485 }
1486
916f14f1
RK
1487 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1488 to count all the ways that I2SRC and I1SRC can be used. */
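  /* For example (a hypothetical pattern): if I2SRC contains
     (mem:SI (post_inc:SI (reg:SI 14))) and it now appears both in NEWPAT
     and in a re-added SET, the increment of (reg:SI 14) would happen
     twice, so the combination must be rejected.  */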
5f4f0e22 1489 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1490 && i2_is_used + added_sets_2 > 1)
5f4f0e22 1491 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
916f14f1
RK
1492 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1493 > 1))
230d793d
RS
1494 /* Fail if we tried to make a new register (we used to abort, but there's
1495 really no reason to). */
1496 || max_reg_num () != maxreg
1497 /* Fail if we couldn't do something and have a CLOBBER. */
1498 || GET_CODE (newpat) == CLOBBER)
1499 {
1500 undo_all ();
1501 return 0;
1502 }
1503
1504 /* If the actions of the earlier insns must be kept
1505 in addition to substituting them into the latest one,
1506 we must make a new PARALLEL for the latest insn
 1507	 to hold the additional SETs.  */
1508
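  /* Sketch of what is built below: a PARALLEL whose leading elements come
     from NEWPAT and whose trailing slots, filled from the end, hold the
     re-added SET from I1 and/or I2PAT, e.g.
	(parallel [<NEWPAT> <PATTERN (i1)> <I2PAT>]).  */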
1509 if (added_sets_1 || added_sets_2)
1510 {
1511 combine_extras++;
1512
1513 if (GET_CODE (newpat) == PARALLEL)
1514 {
1515 rtvec old = XVEC (newpat, 0);
1516 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1517 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1518 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1519 sizeof (old->elem[0]) * old->num_elem);
1520 }
1521 else
1522 {
1523 rtx old = newpat;
1524 total_sets = 1 + added_sets_1 + added_sets_2;
1525 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1526 XVECEXP (newpat, 0, 0) = old;
1527 }
1528
1529 if (added_sets_1)
1530 XVECEXP (newpat, 0, --total_sets)
1531 = (GET_CODE (PATTERN (i1)) == PARALLEL
1532 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1533
1534 if (added_sets_2)
1535 {
1536 /* If there is no I1, use I2's body as is. We used to also not do
1537 the subst call below if I2 was substituted into I3,
1538 but that could lose a simplification. */
1539 if (i1 == 0)
1540 XVECEXP (newpat, 0, --total_sets) = i2pat;
1541 else
1542 /* See comment where i2pat is assigned. */
1543 XVECEXP (newpat, 0, --total_sets)
1544 = subst (i2pat, i1dest, i1src, 0, 0);
1545 }
1546 }
1547
1548 /* We come here when we are replacing a destination in I2 with the
1549 destination of I3. */
1550 validate_replacement:
1551
1552 /* Is the result of combination a valid instruction? */
1553 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1554
1555 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1556 the second SET's destination is a register that is unused. In that case,
1557 we just need the first SET. This can occur when simplifying a divmod
1558 insn. We *must* test for this case here because the code below that
1559 splits two independent SETs doesn't handle this case correctly when it
1560 updates the register status. Also check the case where the first
1561 SET's destination is unused. That would not cause incorrect code, but
1562 does cause an unneeded insn to remain. */
1563
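  /* A made-up divmod example: if NEWPAT is
	(parallel [(set (reg:SI 95) (div:SI (reg:SI 92) (reg:SI 93)))
		   (set (reg:SI 96) (mod:SI (reg:SI 92) (reg:SI 93)))])
     and I3 has a REG_UNUSED note for (reg:SI 96), only the first SET is
     needed and we retry recognition with it alone.  */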
1564 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1565 && XVECLEN (newpat, 0) == 2
1566 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1567 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1568 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1569 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1570 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1571 && asm_noperands (newpat) < 0)
1572 {
1573 newpat = XVECEXP (newpat, 0, 0);
1574 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1575 }
1576
1577 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1578 && XVECLEN (newpat, 0) == 2
1579 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1580 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1581 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1582 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1583 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1584 && asm_noperands (newpat) < 0)
1585 {
1586 newpat = XVECEXP (newpat, 0, 1);
1587 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1588 }
1589
d0ab8cd3
RK
1590 /* See if this is an XOR. If so, perhaps the problem is that the
1591 constant is out of range. Replace it with a complemented XOR with
1592 a complemented constant; it might be in range. */
1593
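  /* For instance (constants invented): if
	(set (reg:SI 97) (xor:SI (reg:SI 98) (const_int -16)))
     fails to match because -16 is awkward for the target, we try
	(set (reg:SI 97) (not:SI (xor:SI (reg:SI 98) (const_int 15))))
     instead, since (xor X C) == (not (xor X ~C)) and ~(-16) == 15.  */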
1594 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1595 && GET_CODE (SET_SRC (newpat)) == XOR
1596 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1597 && ((temp = simplify_unary_operation (NOT,
1598 GET_MODE (SET_SRC (newpat)),
1599 XEXP (SET_SRC (newpat), 1),
1600 GET_MODE (SET_SRC (newpat))))
1601 != 0))
1602 {
1603 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1604 rtx pat
1605 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1606 gen_unary (NOT, i_mode,
1607 gen_binary (XOR, i_mode,
1608 XEXP (SET_SRC (newpat), 0),
1609 temp)));
1610
1611 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1612 if (insn_code_number >= 0)
1613 newpat = pat;
1614 }
1615
230d793d
RS
1616 /* If we were combining three insns and the result is a simple SET
1617 with no ASM_OPERANDS that wasn't recognized, try to split it into two
916f14f1
RK
1618 insns. There are two ways to do this. It can be split using a
1619 machine-specific method (like when you have an addition of a large
1620 constant) or by combine in the function find_split_point. */
1621
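  /* E.g. (set (reg:SI 99) (plus:SI (reg:SI 98) (const_int 65536))) may be
     unrecognizable on a machine whose add insn takes only small
     immediates; splitting can yield a load of the constant into a scratch
     register followed by a register-register add.  */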
230d793d
RS
1622 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1623 && asm_noperands (newpat) < 0)
1624 {
916f14f1 1625 rtx m_split, *split;
42495ca0 1626 rtx ni2dest = i2dest;
916f14f1
RK
1627
1628 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
1629 use I2DEST as a scratch register will help. In the latter case,
1630 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1
RK
1631
1632 m_split = split_insns (newpat, i3);
a70c61d9
JW
1633
1634 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1635 inputs of NEWPAT. */
1636
1637 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1638 possible to try that as a scratch reg. This would require adding
1639 more code to make it work though. */
1640
1641 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
42495ca0
RK
1642 {
1643 /* If I2DEST is a hard register or the only use of a pseudo,
1644 we can change its mode. */
1645 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 1646 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 1647 && GET_CODE (i2dest) == REG
42495ca0
RK
1648 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1649 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1650 && ! REG_USERVAR_P (i2dest))))
1651 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1652 REGNO (i2dest));
1653
1654 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1655 gen_rtvec (2, newpat,
1656 gen_rtx (CLOBBER,
1657 VOIDmode,
1658 ni2dest))),
1659 i3);
1660 }
916f14f1
RK
1661
1662 if (m_split && GET_CODE (m_split) == SEQUENCE
3f508eca
RK
1663 && XVECLEN (m_split, 0) == 2
1664 && (next_real_insn (i2) == i3
1665 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1666 INSN_CUID (i2))))
916f14f1 1667 {
1a26b032 1668 rtx i2set, i3set;
d0ab8cd3 1669 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 1670 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 1671
e4ba89be
RK
1672 i3set = single_set (XVECEXP (m_split, 0, 1));
1673 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 1674
42495ca0
RK
1675 /* In case we changed the mode of I2DEST, replace it in the
1676 pseudo-register table here. We can't do it above in case this
1677 code doesn't get executed and we do a split the other way. */
1678
1679 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1680 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1681
916f14f1 1682 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1a26b032
RK
1683
1684 /* If I2 or I3 has multiple SETs, we won't know how to track
1685 register status, so don't use these insns. */
1686
1687 if (i2_code_number >= 0 && i2set && i3set)
8888fada
RK
1688 insn_code_number = recog_for_combine (&newi3pat, i3,
1689 &new_i3_notes);
c767f54b 1690
d0ab8cd3
RK
1691 if (insn_code_number >= 0)
1692 newpat = newi3pat;
1693
c767f54b 1694 /* It is possible that both insns now set the destination of I3.
22609cbf 1695 If so, we must show an extra use of it. */
c767f54b 1696
1a26b032
RK
1697 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1698 && GET_CODE (SET_DEST (i2set)) == REG
1699 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
22609cbf 1700 reg_n_sets[REGNO (SET_DEST (i2set))]++;
916f14f1 1701 }
230d793d
RS
1702
1703 /* If we can split it and use I2DEST, go ahead and see if that
1704 helps things be recognized. Verify that none of the registers
1705 are set between I2 and I3. */
d0ab8cd3 1706 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d
RS
1707#ifdef HAVE_cc0
1708 && GET_CODE (i2dest) == REG
1709#endif
1710 /* We need I2DEST in the proper mode. If it is a hard register
1711 or the only use of a pseudo, we can change its mode. */
1712 && (GET_MODE (*split) == GET_MODE (i2dest)
1713 || GET_MODE (*split) == VOIDmode
1714 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1715 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1716 && ! REG_USERVAR_P (i2dest)))
1717 && (next_real_insn (i2) == i3
1718 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1719 /* We can't overwrite I2DEST if its value is still used by
1720 NEWPAT. */
1721 && ! reg_referenced_p (i2dest, newpat))
1722 {
1723 rtx newdest = i2dest;
1724
1725 /* Get NEWDEST as a register in the proper mode. We have already
1726 validated that we can do this. */
1727 if (GET_MODE (i2dest) != GET_MODE (*split)
1728 && GET_MODE (*split) != VOIDmode)
1729 {
1730 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1731
1732 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1733 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1734 }
1735
1736 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1737 an ASHIFT. This can occur if it was inside a PLUS and hence
1738 appeared to be a memory address. This is a kludge. */
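	  /* E.g. (mult:SI (reg:SI 80) (const_int 8)) becomes
	     (ashift:SI (reg:SI 80) (const_int 3)).  */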
1739 if (GET_CODE (*split) == MULT
1740 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1741 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1742 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
5f4f0e22 1743 XEXP (*split, 0), GEN_INT (i)));
230d793d
RS
1744
1745#ifdef INSN_SCHEDULING
1746 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1747 be written as a ZERO_EXTEND. */
1748 if (GET_CODE (*split) == SUBREG
1749 && GET_CODE (SUBREG_REG (*split)) == MEM)
1750 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1751 XEXP (*split, 0)));
1752#endif
1753
1754 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1755 SUBST (*split, newdest);
1756 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1757 if (i2_code_number >= 0)
1758 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1759 }
1760 }
1761
1762 /* Check for a case where we loaded from memory in a narrow mode and
1763 then sign extended it, but we need both registers. In that case,
1764 we have a PARALLEL with both loads from the same memory location.
1765 We can split this into a load from memory followed by a register-register
1766 copy. This saves at least one insn, more if register allocation can
1767 eliminate the copy. */
1768
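  /* The shape handled here (modes and registers illustrative only):
	(parallel [(set (reg:SI 88) (sign_extend:SI (mem:HI (reg:SI 89))))
		   (set (reg:HI 87) (mem:HI (reg:SI 89)))])
     keeps the extending load as NEWI2PAT, while the second SET becomes a
     copy of the low part of (reg:SI 88) into (reg:HI 87).  */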
1769 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1770 && GET_CODE (newpat) == PARALLEL
1771 && XVECLEN (newpat, 0) == 2
1772 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1773 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1774 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1775 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1776 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1777 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1778 INSN_CUID (i2))
1779 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1780 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1781 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1782 SET_SRC (XVECEXP (newpat, 0, 1)))
1783 && ! find_reg_note (i3, REG_UNUSED,
1784 SET_DEST (XVECEXP (newpat, 0, 0))))
1785 {
472fbdd1
RK
1786 rtx ni2dest;
1787
230d793d 1788 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 1789 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
1790 newpat = XVECEXP (newpat, 0, 1);
1791 SUBST (SET_SRC (newpat),
472fbdd1 1792 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
230d793d
RS
1793 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1794 if (i2_code_number >= 0)
1795 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
5089e22e
RS
1796
1797 if (insn_code_number >= 0)
1798 {
1799 rtx insn;
1800 rtx link;
1801
1802 /* If we will be able to accept this, we have made a change to the
 1803	 destination of I3.  This can invalidate a LOG_LINKS entry pointing
1804 to I3. No other part of combine.c makes such a transformation.
1805
1806 The new I3 will have a destination that was previously the
 1807	 destination of I1 or I2 and which was used in I2 or I3.  Call
1808 distribute_links to make a LOG_LINK from the next use of
1809 that destination. */
1810
1811 PATTERN (i3) = newpat;
5f4f0e22 1812 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
5089e22e
RS
1813
1814 /* I3 now uses what used to be its destination and which is
1815 now I2's destination. That means we need a LOG_LINK from
1816 I3 to I2. But we used to have one, so we still will.
1817
1818 However, some later insn might be using I2's dest and have
1819 a LOG_LINK pointing at I3. We must remove this link.
1820 The simplest way to remove the link is to point it at I1,
1821 which we know will be a NOTE. */
1822
1823 for (insn = NEXT_INSN (i3);
1824 insn && GET_CODE (insn) != CODE_LABEL
1825 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1826 insn = NEXT_INSN (insn))
1827 {
1828 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
472fbdd1 1829 && reg_referenced_p (ni2dest, PATTERN (insn)))
5089e22e
RS
1830 {
1831 for (link = LOG_LINKS (insn); link;
1832 link = XEXP (link, 1))
1833 if (XEXP (link, 0) == i3)
1834 XEXP (link, 0) = i1;
1835
1836 break;
1837 }
1838 }
1839 }
230d793d
RS
1840 }
1841
1842 /* Similarly, check for a case where we have a PARALLEL of two independent
1843 SETs but we started with three insns. In this case, we can do the sets
1844 as two separate insns. This case occurs when some SET allows two
1845 other insns to combine, but the destination of that SET is still live. */
1846
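  /* E.g. if combining left
	(parallel [(set (reg:SI 70) (plus:SI (reg:SI 71) (reg:SI 72)))
		   (set (reg:SI 73) (neg:SI (reg:SI 71)))])
     with neither SET using the other's destination, the second SET can be
     emitted in place of I2 and the first in place of I3.  */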
1847 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1848 && GET_CODE (newpat) == PARALLEL
1849 && XVECLEN (newpat, 0) == 2
1850 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1851 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1852 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1853 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1854 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1855 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1856 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1857 INSN_CUID (i2))
1858 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1859 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1860 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1861 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1862 XVECEXP (newpat, 0, 0))
1863 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1864 XVECEXP (newpat, 0, 1)))
1865 {
1866 newi2pat = XVECEXP (newpat, 0, 1);
1867 newpat = XVECEXP (newpat, 0, 0);
1868
1869 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1870 if (i2_code_number >= 0)
1871 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1872 }
1873
1874 /* If it still isn't recognized, fail and change things back the way they
1875 were. */
1876 if ((insn_code_number < 0
1877 /* Is the result a reasonable ASM_OPERANDS? */
1878 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1879 {
1880 undo_all ();
1881 return 0;
1882 }
1883
1884 /* If we had to change another insn, make sure it is valid also. */
1885 if (undobuf.other_insn)
1886 {
1887 rtx other_notes = REG_NOTES (undobuf.other_insn);
1888 rtx other_pat = PATTERN (undobuf.other_insn);
1889 rtx new_other_notes;
1890 rtx note, next;
1891
1892 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1893 &new_other_notes);
1894
1895 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1896 {
1897 undo_all ();
1898 return 0;
1899 }
1900
1901 PATTERN (undobuf.other_insn) = other_pat;
1902
1903 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1904 are still valid. Then add any non-duplicate notes added by
1905 recog_for_combine. */
1906 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1907 {
1908 next = XEXP (note, 1);
1909
1910 if (REG_NOTE_KIND (note) == REG_UNUSED
1911 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1a26b032
RK
1912 {
1913 if (GET_CODE (XEXP (note, 0)) == REG)
1914 reg_n_deaths[REGNO (XEXP (note, 0))]--;
1915
1916 remove_note (undobuf.other_insn, note);
1917 }
230d793d
RS
1918 }
1919
1a26b032
RK
1920 for (note = new_other_notes; note; note = XEXP (note, 1))
1921 if (GET_CODE (XEXP (note, 0)) == REG)
1922 reg_n_deaths[REGNO (XEXP (note, 0))]++;
1923
230d793d 1924 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 1925 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
1926 }
1927
1928 /* We now know that we can do this combination. Merge the insns and
1929 update the status of registers and LOG_LINKS. */
1930
1931 {
1932 rtx i3notes, i2notes, i1notes = 0;
1933 rtx i3links, i2links, i1links = 0;
1934 rtx midnotes = 0;
1935 int all_adjacent = (next_real_insn (i2) == i3
1936 && (i1 == 0 || next_real_insn (i1) == i2));
1937 register int regno;
1938 /* Compute which registers we expect to eliminate. */
1939 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1940 ? 0 : i2dest);
1941 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1942
1943 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1944 clear them. */
1945 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1946 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1947 if (i1)
1948 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1949
1950 /* Ensure that we do not have something that should not be shared but
1951 occurs multiple times in the new insns. Check this by first
5089e22e 1952	 resetting all the `used' flags and then copying anything that is shared.  */
230d793d
RS
1953
1954 reset_used_flags (i3notes);
1955 reset_used_flags (i2notes);
1956 reset_used_flags (i1notes);
1957 reset_used_flags (newpat);
1958 reset_used_flags (newi2pat);
1959 if (undobuf.other_insn)
1960 reset_used_flags (PATTERN (undobuf.other_insn));
1961
1962 i3notes = copy_rtx_if_shared (i3notes);
1963 i2notes = copy_rtx_if_shared (i2notes);
1964 i1notes = copy_rtx_if_shared (i1notes);
1965 newpat = copy_rtx_if_shared (newpat);
1966 newi2pat = copy_rtx_if_shared (newi2pat);
1967 if (undobuf.other_insn)
 1968	    PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
1969
1970 INSN_CODE (i3) = insn_code_number;
1971 PATTERN (i3) = newpat;
1972 if (undobuf.other_insn)
1973 INSN_CODE (undobuf.other_insn) = other_code_number;
1974
1975 /* We had one special case above where I2 had more than one set and
1976 we replaced a destination of one of those sets with the destination
1977 of I3. In that case, we have to update LOG_LINKS of insns later
1978 in this basic block. Note that this (expensive) case is rare. */
1979
1980 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1981 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1982 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1983 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
1984 && ! find_reg_note (i2, REG_UNUSED,
1985 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
1986 {
1987 register rtx insn;
1988
1989 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
1990 {
1991 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1992 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1993 if (XEXP (link, 0) == i2)
1994 XEXP (link, 0) = i3;
1995
1996 if (GET_CODE (insn) == CODE_LABEL
1997 || GET_CODE (insn) == JUMP_INSN)
1998 break;
1999 }
2000 }
2001
2002 LOG_LINKS (i3) = 0;
2003 REG_NOTES (i3) = 0;
2004 LOG_LINKS (i2) = 0;
2005 REG_NOTES (i2) = 0;
2006
2007 if (newi2pat)
2008 {
2009 INSN_CODE (i2) = i2_code_number;
2010 PATTERN (i2) = newi2pat;
2011 }
2012 else
2013 {
2014 PUT_CODE (i2, NOTE);
2015 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2016 NOTE_SOURCE_FILE (i2) = 0;
2017 }
2018
2019 if (i1)
2020 {
2021 LOG_LINKS (i1) = 0;
2022 REG_NOTES (i1) = 0;
2023 PUT_CODE (i1, NOTE);
2024 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2025 NOTE_SOURCE_FILE (i1) = 0;
2026 }
2027
2028 /* Get death notes for everything that is now used in either I3 or
2029 I2 and used to die in a previous insn. */
2030
2031 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2032 if (newi2pat)
2033 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2034
2035 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2036 if (i3notes)
5f4f0e22
CH
2037 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2038 elim_i2, elim_i1);
230d793d 2039 if (i2notes)
5f4f0e22
CH
2040 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2041 elim_i2, elim_i1);
230d793d 2042 if (i1notes)
5f4f0e22
CH
2043 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2044 elim_i2, elim_i1);
230d793d 2045 if (midnotes)
5f4f0e22
CH
2046 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2047 elim_i2, elim_i1);
230d793d
RS
2048
2049 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2050 know these are REG_UNUSED and want them to go to the desired insn,
1a26b032
RK
2051 so we always pass it as i3. We have not counted the notes in
2052 reg_n_deaths yet, so we need to do so now. */
2053
230d793d 2054 if (newi2pat && new_i2_notes)
1a26b032
RK
2055 {
2056 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2057 if (GET_CODE (XEXP (temp, 0)) == REG)
2058 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2059
2060 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2061 }
2062
230d793d 2063 if (new_i3_notes)
1a26b032
RK
2064 {
2065 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2066 if (GET_CODE (XEXP (temp, 0)) == REG)
2067 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2068
2069 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2070 }
230d793d
RS
2071
2072 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1a26b032
RK
2073 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2074 Show an additional death due to the REG_DEAD note we make here. If
2075 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 2076
230d793d 2077 if (i3dest_killed)
1a26b032
RK
2078 {
2079 if (GET_CODE (i3dest_killed) == REG)
2080 reg_n_deaths[REGNO (i3dest_killed)]++;
2081
2082 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2083 NULL_RTX),
2084 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2085 NULL_RTX, NULL_RTX);
2086 }
58c8c593
RK
2087
2088 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2089 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2090 we passed I3 in that case, it might delete I2. */
2091
230d793d 2092 if (i2dest_in_i2src)
58c8c593 2093 {
1a26b032
RK
2094 if (GET_CODE (i2dest) == REG)
2095 reg_n_deaths[REGNO (i2dest)]++;
2096
58c8c593
RK
2097 if (newi2pat && reg_set_p (i2dest, newi2pat))
2098 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2099 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2100 else
2101 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2102 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2103 NULL_RTX, NULL_RTX);
2104 }
2105
230d793d 2106 if (i1dest_in_i1src)
58c8c593 2107 {
1a26b032
RK
2108 if (GET_CODE (i1dest) == REG)
2109 reg_n_deaths[REGNO (i1dest)]++;
2110
58c8c593
RK
2111 if (newi2pat && reg_set_p (i1dest, newi2pat))
2112 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2113 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2114 else
2115 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2116 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2117 NULL_RTX, NULL_RTX);
2118 }
230d793d
RS
2119
2120 distribute_links (i3links);
2121 distribute_links (i2links);
2122 distribute_links (i1links);
2123
2124 if (GET_CODE (i2dest) == REG)
2125 {
d0ab8cd3
RK
2126 rtx link;
2127 rtx i2_insn = 0, i2_val = 0, set;
2128
2129 /* The insn that used to set this register doesn't exist, and
2130 this life of the register may not exist either. See if one of
2131 I3's links points to an insn that sets I2DEST. If it does,
2132 that is now the last known value for I2DEST. If we don't update
2133 this and I2 set the register to a value that depended on its old
230d793d
RS
 2134	 contents, we will get confused.  If this insn is used, things
2135 will be set correctly in combine_instructions. */
d0ab8cd3
RK
2136
2137 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2138 if ((set = single_set (XEXP (link, 0))) != 0
2139 && rtx_equal_p (i2dest, SET_DEST (set)))
2140 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2141
2142 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
2143
2144 /* If the reg formerly set in I2 died only once and that was in I3,
2145 zero its use count so it won't make `reload' do any work. */
2146 if (! added_sets_2 && newi2pat == 0)
2147 {
2148 regno = REGNO (i2dest);
2149 reg_n_sets[regno]--;
2150 if (reg_n_sets[regno] == 0
5f4f0e22
CH
2151 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2152 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
230d793d
RS
2153 reg_n_refs[regno] = 0;
2154 }
2155 }
2156
2157 if (i1 && GET_CODE (i1dest) == REG)
2158 {
d0ab8cd3
RK
2159 rtx link;
2160 rtx i1_insn = 0, i1_val = 0, set;
2161
2162 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2163 if ((set = single_set (XEXP (link, 0))) != 0
2164 && rtx_equal_p (i1dest, SET_DEST (set)))
2165 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2166
2167 record_value_for_reg (i1dest, i1_insn, i1_val);
2168
230d793d
RS
2169 regno = REGNO (i1dest);
2170 if (! added_sets_1)
2171 {
2172 reg_n_sets[regno]--;
2173 if (reg_n_sets[regno] == 0
5f4f0e22
CH
2174 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2175 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
230d793d
RS
2176 reg_n_refs[regno] = 0;
2177 }
2178 }
2179
951553af 2180 /* Update reg_nonzero_bits et al for any changes that may have been made
22609cbf
RK
2181 to this insn. */
2182
951553af 2183 note_stores (newpat, set_nonzero_bits_and_sign_copies);
22609cbf 2184 if (newi2pat)
951553af 2185 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
22609cbf 2186
230d793d
RS
2187 /* If I3 is now an unconditional jump, ensure that it has a
2188 BARRIER following it since it may have initially been a
381ee8af 2189 conditional jump. It may also be the last nonnote insn. */
230d793d
RS
2190
2191 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
381ee8af
TW
2192 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2193 || GET_CODE (temp) != BARRIER))
230d793d
RS
2194 emit_barrier_after (i3);
2195 }
2196
2197 combine_successes++;
2198
2199 return newi2pat ? i2 : i3;
2200}
2201\f
2202/* Undo all the modifications recorded in undobuf. */
2203
2204static void
2205undo_all ()
2206{
2207 register int i;
2208 if (undobuf.num_undo > MAX_UNDO)
2209 undobuf.num_undo = MAX_UNDO;
2210 for (i = undobuf.num_undo - 1; i >= 0; i--)
7c046e4e
RK
2211 {
2212 if (undobuf.undo[i].is_int)
2213 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2214 else
2215 *undobuf.undo[i].where.rtx = undobuf.undo[i].old_contents.rtx;
2216
2217 }
230d793d
RS
2218
2219 obfree (undobuf.storage);
2220 undobuf.num_undo = 0;
2221}
2222\f
2223/* Find the innermost point within the rtx at LOC, possibly LOC itself,
d0ab8cd3
RK
2224 where we have an arithmetic expression and return that point. LOC will
2225 be inside INSN.
230d793d
RS
2226
2227 try_combine will call this function to see if an insn can be split into
2228 two insns. */
2229
2230static rtx *
d0ab8cd3 2231find_split_point (loc, insn)
230d793d 2232 rtx *loc;
d0ab8cd3 2233 rtx insn;
230d793d
RS
2234{
2235 rtx x = *loc;
2236 enum rtx_code code = GET_CODE (x);
2237 rtx *split;
2238 int len = 0, pos, unsignedp;
2239 rtx inner;
2240
2241 /* First special-case some codes. */
2242 switch (code)
2243 {
2244 case SUBREG:
2245#ifdef INSN_SCHEDULING
2246 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2247 point. */
2248 if (GET_CODE (SUBREG_REG (x)) == MEM)
2249 return loc;
2250#endif
d0ab8cd3 2251 return find_split_point (&SUBREG_REG (x), insn);
230d793d 2252
230d793d 2253 case MEM:
916f14f1 2254#ifdef HAVE_lo_sum
230d793d
RS
2255 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2256 using LO_SUM and HIGH. */
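	  /* E.g. (mem (symbol_ref "x")) is rewritten as
	     (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))
	     and the split point is the (high ...) subexpression.  */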
2257 if (GET_CODE (XEXP (x, 0)) == CONST
2258 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2259 {
2260 SUBST (XEXP (x, 0),
2261 gen_rtx_combine (LO_SUM, Pmode,
2262 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2263 XEXP (x, 0)));
2264 return &XEXP (XEXP (x, 0), 0);
2265 }
230d793d
RS
2266#endif
2267
916f14f1
RK
2268 /* If we have a PLUS whose second operand is a constant and the
 2269	 address is not valid, perhaps we can split it up using
2270 the machine-specific way to split large constants. We use
d0ab8cd3 2271	 the first pseudo-reg (one of the virtual regs) as a placeholder;
916f14f1
RK
2272 it will not remain in the result. */
2273 if (GET_CODE (XEXP (x, 0)) == PLUS
2274 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2275 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2276 {
2277 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2278 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2279 subst_insn);
2280
2281 /* This should have produced two insns, each of which sets our
2282 placeholder. If the source of the second is a valid address,
 2283	 we can put both sources together and make a split point
2284 in the middle. */
2285
2286 if (seq && XVECLEN (seq, 0) == 2
2287 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2288 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2289 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2290 && ! reg_mentioned_p (reg,
2291 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2292 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2293 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2294 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2295 && memory_address_p (GET_MODE (x),
2296 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2297 {
2298 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2299 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2300
2301 /* Replace the placeholder in SRC2 with SRC1. If we can
2302 find where in SRC2 it was placed, that can become our
2303 split point and we can replace this address with SRC2.
2304 Just try two obvious places. */
2305
2306 src2 = replace_rtx (src2, reg, src1);
2307 split = 0;
2308 if (XEXP (src2, 0) == src1)
2309 split = &XEXP (src2, 0);
2310 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2311 && XEXP (XEXP (src2, 0), 0) == src1)
2312 split = &XEXP (XEXP (src2, 0), 0);
2313
2314 if (split)
2315 {
2316 SUBST (XEXP (x, 0), src2);
2317 return split;
2318 }
2319 }
1a26b032
RK
2320
2321 /* If that didn't work, perhaps the first operand is complex and
2322 needs to be computed separately, so make a split point there.
2323 This will occur on machines that just support REG + CONST
2324 and have a constant moved through some previous computation. */
2325
2326 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2327 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2328 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2329 == 'o')))
2330 return &XEXP (XEXP (x, 0), 0);
916f14f1
RK
2331 }
2332 break;
2333
230d793d
RS
2334 case SET:
2335#ifdef HAVE_cc0
2336 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2337 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2338 we need to put the operand into a register. So split at that
2339 point. */
2340
2341 if (SET_DEST (x) == cc0_rtx
2342 && GET_CODE (SET_SRC (x)) != COMPARE
2343 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2344 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2345 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2346 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2347 return &SET_SRC (x);
2348#endif
2349
2350 /* See if we can split SET_SRC as it stands. */
d0ab8cd3 2351 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2352 if (split && split != &SET_SRC (x))
2353 return split;
2354
2355 /* See if this is a bitfield assignment with everything constant. If
2356 so, this is an IOR of an AND, so split it into that. */
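      /* An invented example: storing the constant 2 into a 2-bit field at
	 bit position 4 of (reg:SI 60) becomes
	    (set (reg:SI 60) (ior:SI (and:SI (reg:SI 60) (const_int -49))
				     (const_int 32)))
	 i.e. clear the field with mask ~0x30, then IOR in 2<<4.  (When the
	 source equals the field mask, the AND is omitted.)  */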
2357 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2358 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
5f4f0e22 2359 <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
2360 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2361 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2362 && GET_CODE (SET_SRC (x)) == CONST_INT
2363 && ((INTVAL (XEXP (SET_DEST (x), 1))
2364 + INTVAL (XEXP (SET_DEST (x), 2)))
2365 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2366 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2367 {
2368 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2369 int len = INTVAL (XEXP (SET_DEST (x), 1));
2370 int src = INTVAL (SET_SRC (x));
2371 rtx dest = XEXP (SET_DEST (x), 0);
2372 enum machine_mode mode = GET_MODE (dest);
5f4f0e22 2373 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
230d793d
RS
2374
2375#if BITS_BIG_ENDIAN
2376 pos = GET_MODE_BITSIZE (mode) - len - pos;
2377#endif
2378
2379 if (src == mask)
2380 SUBST (SET_SRC (x),
5f4f0e22 2381 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
230d793d
RS
2382 else
2383 SUBST (SET_SRC (x),
2384 gen_binary (IOR, mode,
2385 gen_binary (AND, mode, dest,
5f4f0e22
CH
2386 GEN_INT (~ (mask << pos)
2387 & GET_MODE_MASK (mode))),
2388 GEN_INT (src << pos)));
230d793d
RS
2389
2390 SUBST (SET_DEST (x), dest);
2391
d0ab8cd3 2392 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2393 if (split && split != &SET_SRC (x))
2394 return split;
2395 }
2396
2397 /* Otherwise, see if this is an operation that we can split into two.
2398 If so, try to split that. */
2399 code = GET_CODE (SET_SRC (x));
2400
2401 switch (code)
2402 {
d0ab8cd3
RK
2403 case AND:
2404 /* If we are AND'ing with a large constant that is only a single
2405 bit and the result is only being used in a context where we
2406 need to know if it is zero or non-zero, replace it with a bit
2407 extraction. This will avoid the large constant, which might
2408 have taken more than one insn to make. If the constant were
2409 not a valid argument to the AND but took only one insn to make,
2410 this is no worse, but if it took more than one insn, it will
2411 be better. */
2412
2413 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2414 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2415 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2416 && GET_CODE (SET_DEST (x)) == REG
2417 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2418 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2419 && XEXP (*split, 0) == SET_DEST (x)
2420 && XEXP (*split, 1) == const0_rtx)
2421 {
2422 SUBST (SET_SRC (x),
2423 make_extraction (GET_MODE (SET_DEST (x)),
2424 XEXP (SET_SRC (x), 0),
2425 pos, NULL_RTX, 1, 1, 0, 0));
2426 return find_split_point (loc, insn);
2427 }
2428 break;
2429
230d793d
RS
2430 case SIGN_EXTEND:
2431 inner = XEXP (SET_SRC (x), 0);
2432 pos = 0;
2433 len = GET_MODE_BITSIZE (GET_MODE (inner));
2434 unsignedp = 0;
2435 break;
2436
2437 case SIGN_EXTRACT:
2438 case ZERO_EXTRACT:
2439 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2440 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2441 {
2442 inner = XEXP (SET_SRC (x), 0);
2443 len = INTVAL (XEXP (SET_SRC (x), 1));
2444 pos = INTVAL (XEXP (SET_SRC (x), 2));
2445
2446#if BITS_BIG_ENDIAN
2447 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2448#endif
2449 unsignedp = (code == ZERO_EXTRACT);
2450 }
2451 break;
2452 }
2453
2454 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2455 {
2456 enum machine_mode mode = GET_MODE (SET_SRC (x));
2457
d0ab8cd3
RK
2458 /* For unsigned, we have a choice of a shift followed by an
2459 AND or two shifts. Use two shifts for field sizes where the
2460 constant might be too large. We assume here that we can
2461 always at least get 8-bit constants in an AND insn, which is
2462 true for every current RISC. */
2463
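	  /* E.g. an 8-bit unsigned field at bit 8 of INNER becomes
		(and (lshiftrt INNER 8) 255),
	     while a wider or signed field is done with two shifts:
		(ashift INNER (size - len - pos))
	     followed by LSHIFTRT or ASHIFTRT by (size - len).  */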
2464 if (unsignedp && len <= 8)
230d793d
RS
2465 {
2466 SUBST (SET_SRC (x),
2467 gen_rtx_combine
2468 (AND, mode,
2469 gen_rtx_combine (LSHIFTRT, mode,
2470 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
2471 GEN_INT (pos)),
2472 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
230d793d 2473
d0ab8cd3 2474 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2475 if (split && split != &SET_SRC (x))
2476 return split;
2477 }
2478 else
2479 {
2480 SUBST (SET_SRC (x),
2481 gen_rtx_combine
d0ab8cd3 2482 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
230d793d
RS
2483 gen_rtx_combine (ASHIFT, mode,
2484 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
2485 GEN_INT (GET_MODE_BITSIZE (mode)
2486 - len - pos)),
2487 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
230d793d 2488
d0ab8cd3 2489 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2490 if (split && split != &SET_SRC (x))
2491 return split;
2492 }
2493 }
2494
2495 /* See if this is a simple operation with a constant as the second
2496 operand. It might be that this constant is out of range and hence
2497 could be used as a split point. */
2498 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2499 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2500 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2501 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2502 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2503 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2504 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2505 == 'o'))))
2506 return &XEXP (SET_SRC (x), 1);
2507
2508 /* Finally, see if this is a simple operation with its first operand
2509 not in a register. The operation might require this operand in a
2510 register, so return it as a split point. We can always do this
2511 because if the first operand were another operation, we would have
2512 already found it as a split point. */
2513 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2514 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2515 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2516 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2517 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2518 return &XEXP (SET_SRC (x), 0);
2519
2520 return 0;
2521
2522 case AND:
2523 case IOR:
2524 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2525 it is better to write this as (not (ior A B)) so we can split it.
2526 Similarly for IOR. */
2527 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2528 {
2529 SUBST (*loc,
2530 gen_rtx_combine (NOT, GET_MODE (x),
2531 gen_rtx_combine (code == IOR ? AND : IOR,
2532 GET_MODE (x),
2533 XEXP (XEXP (x, 0), 0),
2534 XEXP (XEXP (x, 1), 0))));
d0ab8cd3 2535 return find_split_point (loc, insn);
230d793d
RS
2536 }
2537
2538 /* Many RISC machines have a large set of logical insns. If the
2539 second operand is a NOT, put it first so we will try to split the
2540 other operand first. */
2541 if (GET_CODE (XEXP (x, 1)) == NOT)
2542 {
2543 rtx tem = XEXP (x, 0);
2544 SUBST (XEXP (x, 0), XEXP (x, 1));
2545 SUBST (XEXP (x, 1), tem);
2546 }
2547 break;
2548 }
2549
2550 /* Otherwise, select our actions depending on our rtx class. */
2551 switch (GET_RTX_CLASS (code))
2552 {
2553 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2554 case '3':
d0ab8cd3 2555 split = find_split_point (&XEXP (x, 2), insn);
230d793d
RS
2556 if (split)
2557 return split;
2558 /* ... fall through ... */
2559 case '2':
2560 case 'c':
2561 case '<':
d0ab8cd3 2562 split = find_split_point (&XEXP (x, 1), insn);
230d793d
RS
2563 if (split)
2564 return split;
2565 /* ... fall through ... */
2566 case '1':
2567 /* Some machines have (and (shift ...) ...) insns. If X is not
2568 an AND, but XEXP (X, 0) is, use it as our split point. */
2569 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2570 return &XEXP (x, 0);
2571
d0ab8cd3 2572 split = find_split_point (&XEXP (x, 0), insn);
230d793d
RS
2573 if (split)
2574 return split;
2575 return loc;
2576 }
2577
2578 /* Otherwise, we don't have a split point. */
2579 return 0;
2580}
2581\f
2582/* Throughout X, replace FROM with TO, and return the result.
2583 The result is TO if X is FROM;
2584 otherwise the result is X, but its contents may have been modified.
2585 If they were modified, a record was made in undobuf so that
2586 undo_all will (among other things) return X to its original state.
2587
 2588	 If the number of changes necessary is too large to record to undo,
2589 the excess changes are not made, so the result is invalid.
2590 The changes already made can still be undone.
 2591	 undobuf.num_undo is incremented for such changes, so by testing that,
2592 the caller can tell whether the result is valid.
2593
2594 `n_occurrences' is incremented each time FROM is replaced.
2595
2596 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2597
5089e22e 2598 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
230d793d
RS
2599 by copying if `n_occurrences' is non-zero. */
2600
2601static rtx
2602subst (x, from, to, in_dest, unique_copy)
2603 register rtx x, from, to;
2604 int in_dest;
2605 int unique_copy;
2606{
2607 register char *fmt;
2608 register int len, i;
2609 register enum rtx_code code = GET_CODE (x), orig_code = code;
2610 rtx temp;
2611 enum machine_mode mode = GET_MODE (x);
2612 enum machine_mode op0_mode = VOIDmode;
2613 rtx other_insn;
2614 rtx *cc_use;
2615 int n_restarts = 0;
2616
2617/* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2618 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2619 If it is 0, that cannot be done. We can now do this for any MEM
2620 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2621 If not for that, MEM's would very rarely be safe. */
2622
2623/* Reject MODEs bigger than a word, because we might not be able
2624 to reference a two-register group starting with an arbitrary register
2625 (and currently gen_lowpart might crash for a SUBREG). */
2626
2627#define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2628 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2629
2630/* Two expressions are equal if they are identical copies of a shared
2631 RTX or if they are both registers with the same register number
2632 and mode. */
2633
2634#define COMBINE_RTX_EQUAL_P(X,Y) \
2635 ((X) == (Y) \
2636 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2637 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2638
2639 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2640 {
2641 n_occurrences++;
2642 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2643 }
2644
2645 /* If X and FROM are the same register but different modes, they will
2646 not have been seen as equal above. However, flow.c will make a
2647 LOG_LINKS entry for that case. If we do nothing, we will try to
2648 rerecognize our original insn and, when it succeeds, we will
2649 delete the feeding insn, which is incorrect.
2650
2651 So force this insn not to match in this (rare) case. */
2652 if (! in_dest && code == REG && GET_CODE (from) == REG
2653 && REGNO (x) == REGNO (from))
2654 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2655
2656 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2657 of which may contain things that can be combined. */
2658 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2659 return x;
2660
2661 /* It is possible to have a subexpression appear twice in the insn.
2662 Suppose that FROM is a register that appears within TO.
2663 Then, after that subexpression has been scanned once by `subst',
2664 the second time it is scanned, TO may be found. If we were
2665 to scan TO here, we would find FROM within it and create a
2666 self-referent rtl structure which is completely wrong. */
2667 if (COMBINE_RTX_EQUAL_P (x, to))
2668 return to;
2669
2670 len = GET_RTX_LENGTH (code);
2671 fmt = GET_RTX_FORMAT (code);
2672
2673 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2674 set up to skip this common case. All other cases where we want to
2675 suppress replacing something inside a SET_SRC are handled via the
2676 IN_DEST operand. */
2677 if (code == SET
2678 && (GET_CODE (SET_DEST (x)) == REG
2679 || GET_CODE (SET_DEST (x)) == CC0
2680 || GET_CODE (SET_DEST (x)) == PC))
2681 fmt = "ie";
2682
2683 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2684 if (fmt[0] == 'e')
2685 op0_mode = GET_MODE (XEXP (x, 0));
2686
2687 for (i = 0; i < len; i++)
2688 {
2689 if (fmt[i] == 'E')
2690 {
2691 register int j;
2692 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2693 {
2694 register rtx new;
2695 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2696 {
2697 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2698 n_occurrences++;
2699 }
2700 else
2701 {
2702 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2703
2704 /* If this substitution failed, this whole thing fails. */
2705 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2706 return new;
2707 }
2708
2709 SUBST (XVECEXP (x, i, j), new);
2710 }
2711 }
2712 else if (fmt[i] == 'e')
2713 {
2714 register rtx new;
2715
2716 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2717 {
2718 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2719 n_occurrences++;
2720 }
2721 else
2722 /* If we are in a SET_DEST, suppress most cases unless we
2723 have gone inside a MEM, in which case we want to
2724 simplify the address. We assume here that things that
2725 are actually part of the destination have their inner
2726 parts in the first expression. This is true for SUBREG,
2727 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2728 things aside from REG and MEM that should appear in a
2729 SET_DEST. */
2730 new = subst (XEXP (x, i), from, to,
2731 (((in_dest
2732 && (code == SUBREG || code == STRICT_LOW_PART
2733 || code == ZERO_EXTRACT))
2734 || code == SET)
2735 && i == 0), unique_copy);
2736
2737 /* If we found that we will have to reject this combination,
2738 indicate that by returning the CLOBBER ourselves, rather than
2739 an expression containing it. This will speed things up as
2740 well as prevent accidents where two CLOBBERs are considered
2741 to be equal, thus producing an incorrect simplification. */
2742
2743 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2744 return new;
2745
2746 SUBST (XEXP (x, i), new);
2747 }
2748 }
2749
d0ab8cd3
RK
2750 /* We come back to here if we have replaced the expression with one of
2751 a different code and it is likely that further simplification will be
2752 possible. */
2753
2754 restart:
2755
eeb43d32
RK
2756 /* If we have restarted more than 4 times, we are probably looping, so
2757 give up. */
2758 if (++n_restarts > 4)
2759 return x;
2760
2761 /* If we are restarting at all, it means that we no longer know the
2762 original mode of operand 0 (since we have probably changed the
2763 form of X). */
2764
2765 if (n_restarts > 1)
2766 op0_mode = VOIDmode;
2767
d0ab8cd3
RK
2768 code = GET_CODE (x);
2769
230d793d
RS
2770 /* If this is a commutative operation, put a constant last and a complex
2771 expression first. We don't need to do this for comparisons here. */
2772 if (GET_RTX_CLASS (code) == 'c'
2773 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2774 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2775 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2776 || (GET_CODE (XEXP (x, 0)) == SUBREG
2777 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2778 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2779 {
2780 temp = XEXP (x, 0);
2781 SUBST (XEXP (x, 0), XEXP (x, 1));
2782 SUBST (XEXP (x, 1), temp);
2783 }
2784
22609cbf
RK
2785 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2786 sign extension of a PLUS with a constant, reverse the order of the sign
 2787	 extension and the addition.  Note that this is not the same as the original
2788 code, but overflow is undefined for signed values. Also note that the
2789 PLUS will have been partially moved "inside" the sign-extension, so that
2790 the first operand of X will really look like:
2791 (ashiftrt (plus (ashift A C4) C5) C4).
2792 We convert this to
 2793	 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
2794 and replace the first operand of X with that expression. Later parts
2795 of this function may simplify the expression further.
2796
2797 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2798 we swap the SIGN_EXTEND and PLUS. Later code will apply the
 2799	 distributive law to produce (plus (mult (sign_extend A) C2) C3).
2800
2801 We do this to simplify address expressions. */
2802
2803 if ((code == PLUS || code == MINUS || code == MULT)
2804 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2805 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2806 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2807 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2808 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2809 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2810 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2811 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2812 XEXP (XEXP (XEXP (x, 0), 0), 1),
2813 XEXP (XEXP (x, 0), 1))) != 0)
2814 {
2815 rtx new
2816 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2817 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2818 INTVAL (XEXP (XEXP (x, 0), 1)));
2819
2820 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2821 INTVAL (XEXP (XEXP (x, 0), 1)));
2822
2823 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2824 }
2825
d0ab8cd3
RK
2826 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2827 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2828 things. Don't deal with operations that change modes here. */
2829
2830 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2831 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2832 {
58744483
RK
2833 /* Don't do this by using SUBST inside X since we might be messing
2834 up a shared expression. */
2835 rtx cond = XEXP (XEXP (x, 0), 0);
2836 rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2837 XEXP (x, 1)),
1a26b032 2838 pc_rtx, pc_rtx, 0, 0);
58744483
RK
2839 rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2840 XEXP (x, 1)),
1a26b032 2841 pc_rtx, pc_rtx, 0, 0);
58744483
RK
2842
2843
2844 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
d0ab8cd3
RK
2845 goto restart;
2846 }
2847
2848 else if (GET_RTX_CLASS (code) == '1'
2849 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2850 && GET_MODE (XEXP (x, 0)) == mode)
2851 {
58744483
RK
2852 rtx cond = XEXP (XEXP (x, 0), 0);
2853 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
1a26b032 2854 pc_rtx, pc_rtx, 0, 0);
58744483 2855 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
1a26b032 2856 pc_rtx, pc_rtx, 0, 0);
58744483
RK
2857
2858 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
d0ab8cd3
RK
2859 goto restart;
2860 }
2861
230d793d
RS
2862 /* Try to fold this expression in case we have constants that weren't
2863 present before. */
2864 temp = 0;
2865 switch (GET_RTX_CLASS (code))
2866 {
2867 case '1':
2868 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2869 break;
2870 case '<':
2871 temp = simplify_relational_operation (code, op0_mode,
2872 XEXP (x, 0), XEXP (x, 1));
77fa0940
RK
2873#ifdef FLOAT_STORE_FLAG_VALUE
2874 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2875 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2876 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2877#endif
230d793d
RS
2878 break;
2879 case 'c':
2880 case '2':
2881 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2882 break;
2883 case 'b':
2884 case '3':
2885 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2886 XEXP (x, 1), XEXP (x, 2));
2887 break;
2888 }
2889
2890 if (temp)
d0ab8cd3 2891 x = temp, code = GET_CODE (temp);
230d793d 2892
230d793d
RS
2893 /* First see if we can apply the inverse distributive law. */
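  /* E.g. (ior (and A B) (and A C)) can become (and A (ior B C));
     `apply_distributive_law' looks for such common factors.  */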
2894 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2895 {
2896 x = apply_distributive_law (x);
2897 code = GET_CODE (x);
2898 }
2899
2900 /* If CODE is an associative operation not otherwise handled, see if we
2901 can associate some operands. This can win if they are constants or
 2902	 if they are logically related (i.e. (a & b) & a).  */
2903 if ((code == PLUS || code == MINUS
2904 || code == MULT || code == AND || code == IOR || code == XOR
2905 || code == DIV || code == UDIV
2906 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2907 && GET_MODE_CLASS (mode) == MODE_INT)
2908 {
2909 if (GET_CODE (XEXP (x, 0)) == code)
2910 {
2911 rtx other = XEXP (XEXP (x, 0), 0);
2912 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2913 rtx inner_op1 = XEXP (x, 1);
2914 rtx inner;
2915
2916 /* Make sure we pass the constant operand if any as the second
2917 one if this is a commutative operation. */
2918 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2919 {
2920 rtx tem = inner_op0;
2921 inner_op0 = inner_op1;
2922 inner_op1 = tem;
2923 }
2924 inner = simplify_binary_operation (code == MINUS ? PLUS
2925 : code == DIV ? MULT
2926 : code == UDIV ? MULT
2927 : code,
2928 mode, inner_op0, inner_op1);
2929
2930 /* For commutative operations, try the other pair if that one
2931 didn't simplify. */
2932 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2933 {
2934 other = XEXP (XEXP (x, 0), 1);
2935 inner = simplify_binary_operation (code, mode,
2936 XEXP (XEXP (x, 0), 0),
2937 XEXP (x, 1));
2938 }
2939
2940 if (inner)
2941 {
2942 x = gen_binary (code, mode, other, inner);
2943 goto restart;
2944
2945 }
2946 }
2947 }
2948
2949 /* A little bit of algebraic simplification here. */
2950 switch (code)
2951 {
2952 case MEM:
2953 /* Ensure that our address has any ASHIFTs converted to MULT in case
2954 address-recognizing predicates are called later. */
2955 temp = make_compound_operation (XEXP (x, 0), MEM);
2956 SUBST (XEXP (x, 0), temp);
2957 break;
2958
2959 case SUBREG:
2960 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2961 is paradoxical. If we can't do that safely, then it becomes
2962 something nonsensical so that this combination won't take place. */
2963
2964 if (GET_CODE (SUBREG_REG (x)) == MEM
2965 && (GET_MODE_SIZE (mode)
2966 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2967 {
2968 rtx inner = SUBREG_REG (x);
2969 int endian_offset = 0;
2970 /* Don't change the mode of the MEM
2971 if that would change the meaning of the address. */
2972 if (MEM_VOLATILE_P (SUBREG_REG (x))
2973 || mode_dependent_address_p (XEXP (inner, 0)))
2974 return gen_rtx (CLOBBER, mode, const0_rtx);
2975
2976#if BYTES_BIG_ENDIAN
2977 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2978 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2979 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2980 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2981#endif
 2982	    /* Note that if the plus_constant doesn't make a valid address
2983 then this combination won't be accepted. */
2984 x = gen_rtx (MEM, mode,
2985 plus_constant (XEXP (inner, 0),
2986 (SUBREG_WORD (x) * UNITS_PER_WORD
2987 + endian_offset)));
2988 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
2989 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
2990 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
2991 return x;
2992 }
2993
2994 /* If we are in a SET_DEST, these other cases can't apply. */
2995 if (in_dest)
2996 return x;
2997
2998 /* Changing mode twice with SUBREG => just change it once,
2999 or not at all if changing back to starting mode. */
3000 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3001 {
3002 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3003 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3004 return SUBREG_REG (SUBREG_REG (x));
3005
3006 SUBST_INT (SUBREG_WORD (x),
3007 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3008 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3009 }
3010
3011 /* SUBREG of a hard register => just change the register number
3012 and/or mode. If the hard register is not valid in that mode,
3013 suppress this combination. If the hard register is the stack,
3014 frame, or argument pointer, leave this as a SUBREG. */
3015
3016 if (GET_CODE (SUBREG_REG (x)) == REG
3017 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3018 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3019#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3020 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3021#endif
3022 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3023 {
3024 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3025 mode))
3026 return gen_rtx (REG, mode,
3027 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3028 else
3029 return gen_rtx (CLOBBER, mode, const0_rtx);
3030 }
3031
3032 /* For a constant, try to pick up the part we want. Handle a full
3033 word and low-order part. Only do this if we are narrowing
3034 the constant; if it is being widened, we have no idea what
3035 the extra bits will have been set to. */
3036
3037 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3038 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3039 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3040 && GET_MODE_CLASS (mode) == MODE_INT)
3041 {
3042 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3043 0, op0_mode);
3044 if (temp)
3045 return temp;
3046 }
3047
3048 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3049 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
3050 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3051
3052 /* If we are narrowing the object, we need to see if we can simplify
3053 the expression for the object knowing that we only need the
3054 low-order bits. */
3055
3056 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
3057 && subreg_lowpart_p (x))
3058 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
3059 NULL_RTX);
3060 break;
3061
3062 case NOT:
3063 /* (not (plus X -1)) can become (neg X). */
3064 if (GET_CODE (XEXP (x, 0)) == PLUS
3065 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3066 {
3067 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3068 goto restart;
3069 }
3070
3071 /* Similarly, (not (neg X)) is (plus X -1). */
3072 if (GET_CODE (XEXP (x, 0)) == NEG)
3073 {
3074 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3075 goto restart;
3076 }
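 /* Both rewrites are the two's-complement identities ~(X - 1) == -X
    and ~(-X) == X - 1; e.g., for X == 5, ~4 == -5 and ~(-5) == 4. */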
3077
3078 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3079 if (GET_CODE (XEXP (x, 0)) == XOR
3080 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3081 && (temp = simplify_unary_operation (NOT, mode,
3082 XEXP (XEXP (x, 0), 1),
3083 mode)) != 0)
3084 {
3085 SUBST (XEXP (XEXP (x, 0), 1), temp);
3086 return XEXP (x, 0);
3087 }
3088
3089 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3090 other than 1, but that is not valid. We could do a similar
3091 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3092 but this doesn't seem common enough to bother with. */
3093 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3094 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3095 {
3096 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
3097 XEXP (XEXP (x, 0), 1));
3098 goto restart;
3099 }
3100
3101 if (GET_CODE (XEXP (x, 0)) == SUBREG
3102 && subreg_lowpart_p (XEXP (x, 0))
3103 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3104 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3105 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3106 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3107 {
3108 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3109
3110 x = gen_rtx (ROTATE, inner_mode,
3111 gen_unary (NOT, inner_mode, const1_rtx),
3112 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3113 x = gen_lowpart_for_combine (mode, x);
3114 goto restart;
3115 }
3116
3117#if STORE_FLAG_VALUE == -1
3118 /* (not (comparison foo bar)) can be done by reversing the comparison
3119 code if valid. */
3120 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3121 && reversible_comparison_p (XEXP (x, 0)))
3122 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3123 mode, XEXP (XEXP (x, 0), 0),
3124 XEXP (XEXP (x, 0), 1));
3125
3126 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3127 is (lt foo (const_int 0)), so we can perform the above
3128 simplification. */
3129
3130 if (GET_CODE (XEXP (x, 0)) == ASHIFTRT
3131 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3132 && (INTVAL (XEXP (XEXP (x, 0), 1))
3133 == GET_MODE_BITSIZE (mode) - 1))
3134 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3135#endif
3136
3137 /* Apply De Morgan's laws to reduce number of patterns for machines
3138 with negating logical insns (and-not, nand, etc.). If result has
3139 only one NOT, put it first, since that is how the patterns are
3140 coded. */
3141
3142 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3143 {
3144 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3145
3146 if (GET_CODE (in1) == NOT)
3147 in1 = XEXP (in1, 0);
3148 else
3149 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3150
3151 if (GET_CODE (in2) == NOT)
3152 in2 = XEXP (in2, 0);
3153 else if (GET_CODE (in2) == CONST_INT
3154 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3155 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3156 else
3157 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3158
3159 if (GET_CODE (in2) == NOT)
3160 {
3161 rtx tem = in2;
3162 in2 = in1; in1 = tem;
3163 }
3164
3165 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3166 mode, in1, in2);
3167 goto restart;
3168 }
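 /* For example, (not (and X (not Y))) becomes (ior (not X) Y),
    with the surviving NOT placed first. */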
3169 break;
3170
3171 case NEG:
3172 /* (neg (plus X 1)) can become (not X). */
3173 if (GET_CODE (XEXP (x, 0)) == PLUS
3174 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3175 {
3176 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3177 goto restart;
3178 }
3179
3180 /* Similarly, (neg (not X)) is (plus X 1). */
3181 if (GET_CODE (XEXP (x, 0)) == NOT)
3182 {
3183 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
3184 goto restart;
3185 }
3186
3187 /* (neg (minus X Y)) can become (minus Y X). */
3188 if (GET_CODE (XEXP (x, 0)) == MINUS
3189 && (GET_MODE_CLASS (mode) != MODE_FLOAT
3190 /* x-y != -(y-x) with IEEE floating point. */
3191 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
3192 {
3193 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3194 XEXP (XEXP (x, 0), 0));
3195 goto restart;
3196 }
3197
3198 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3199 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3200 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3201 {
3202 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3203 goto restart;
3204 }
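 /* Checking both values: if A is 0, -(0 ^ 1) is -1 == A - 1;
    if A is 1, -(1 ^ 1) is 0 == A - 1. */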
3205
3206 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3207 if we can then eliminate the NEG (e.g.,
3208 if the operand is a constant). */
3209
3210 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3211 {
3212 temp = simplify_unary_operation (NEG, mode,
3213 XEXP (XEXP (x, 0), 0), mode);
3214 if (temp)
3215 {
3216 SUBST (XEXP (XEXP (x, 0), 0), temp);
3217 return XEXP (x, 0);
3218 }
3219 }
3220
3221 temp = expand_compound_operation (XEXP (x, 0));
3222
3223 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3224 replaced by (lshiftrt X C). This will convert
3225 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3226
3227 if (GET_CODE (temp) == ASHIFTRT
3228 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3229 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3230 {
3231 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3232 INTVAL (XEXP (temp, 1)));
3233 goto restart;
3234 }
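 /* With C the sign-bit count, (ashiftrt X C) is 0 or -1 while
    (lshiftrt X C) is 0 or 1, so negating one yields the other. */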
3235
3236 /* If X has only a single bit that might be nonzero, say, bit I, convert
3237 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3238 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3239 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3240 or a SUBREG of one since we'd be making the expression more
3241 complex if it was just a register. */
3242
3243 if (GET_CODE (temp) != REG
3244 && ! (GET_CODE (temp) == SUBREG
3245 && GET_CODE (SUBREG_REG (temp)) == REG)
3246 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3247 {
3248 rtx temp1 = simplify_shift_const
3249 (NULL_RTX, ASHIFTRT, mode,
3250 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3251 GET_MODE_BITSIZE (mode) - 1 - i),
3252 GET_MODE_BITSIZE (mode) - 1 - i);
3253
3254 /* If all we did was surround TEMP with the two shifts, we
3255 haven't improved anything, so don't use it. Otherwise,
3256 we are better off with TEMP1. */
3257 if (GET_CODE (temp1) != ASHIFTRT
3258 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3259 || XEXP (XEXP (temp1, 0), 0) != temp)
3260 {
3261 x = temp1;
3262 goto restart;
3263 }
3264 }
3265 break;
3266
3267 case FLOAT_TRUNCATE:
3268 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3269 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3270 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3271 return XEXP (XEXP (x, 0), 0);
3272 break;
3273
3274#ifdef HAVE_cc0
3275 case COMPARE:
3276 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3277 using cc0, in which case we want to leave it as a COMPARE
3278 so we can distinguish it from a register-register-copy. */
3279 if (XEXP (x, 1) == const0_rtx)
3280 return XEXP (x, 0);
3281
3282 /* In IEEE floating point, x-0 is not the same as x. */
3283 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3284 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3285 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3286 return XEXP (x, 0);
3287 break;
3288#endif
3289
3290 case CONST:
3291 /* (const (const X)) can become (const X). Do it this way rather than
3292 returning the inner CONST since CONST can be shared with a
3293 REG_EQUAL note. */
3294 if (GET_CODE (XEXP (x, 0)) == CONST)
3295 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3296 break;
3297
3298#ifdef HAVE_lo_sum
3299 case LO_SUM:
3300 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3301 can add in an offset. find_split_point will split this address up
3302 again if it doesn't match. */
3303 if (GET_CODE (XEXP (x, 0)) == HIGH
3304 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3305 return XEXP (x, 1);
3306 break;
3307#endif
3308
3309 case PLUS:
3310 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3311 outermost. That's because that's the way indexed addresses are
3312 supposed to appear. This code used to check many more cases, but
3313 they are now checked elsewhere. */
3314 if (GET_CODE (XEXP (x, 0)) == PLUS
3315 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3316 return gen_binary (PLUS, mode,
3317 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3318 XEXP (x, 1)),
3319 XEXP (XEXP (x, 0), 1));
3320
3321 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3322 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3323 bit-field and can be replaced by either a sign_extend or a
3324 sign_extract. The `and' may be a zero_extend. */
3325 if (GET_CODE (XEXP (x, 0)) == XOR
3326 && GET_CODE (XEXP (x, 1)) == CONST_INT
3327 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3328 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3329 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3330 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3331 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3332 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3333 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3334 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3335 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3336 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3337 == i + 1))))
3338 {
3339 x = simplify_shift_const
3340 (NULL_RTX, ASHIFTRT, mode,
3341 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3342 XEXP (XEXP (XEXP (x, 0), 0), 0),
3343 GET_MODE_BITSIZE (mode) - (i + 1)),
3344 GET_MODE_BITSIZE (mode) - (i + 1));
3345 goto restart;
3346 }
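 /* As a worked instance: for an 8-bit field in SImode we have i == 7,
    so the AND mask is 255 and C is 128; (plus (xor X 128) -128) maps
    0..127 to itself and 128..255 to -128..-1, which is precisely a
    sign extension, and the shift pair built above computes it. */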
3347
3348 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3349 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3350 the bitsize of the mode - 1. This allows simplification of
3351 "a = (b & 8) == 0;" */
3352 if (XEXP (x, 1) == constm1_rtx
3353 && GET_CODE (XEXP (x, 0)) != REG
3354 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3355 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3356 && nonzero_bits (XEXP (x, 0), mode) == 1)
3357 {
3358 x = simplify_shift_const
3359 (NULL_RTX, ASHIFTRT, mode,
3360 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3361 gen_rtx_combine (XOR, mode,
3362 XEXP (x, 0), const1_rtx),
3363 GET_MODE_BITSIZE (mode) - 1),
3364 GET_MODE_BITSIZE (mode) - 1);
3365 goto restart;
3366 }
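 /* E.g., in "a = (b & 8) == 0", X is 0 or 1 and X - 1 is -1 or 0;
    shifting (xor X 1) up to the sign bit and back arithmetically
    produces exactly those two values. */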
3367
3368 /* If we are adding two things that have no bits in common, convert
3369 the addition into an IOR. This will often be further simplified,
3370 for example in cases like ((a & 1) + (a & 2)), which can
3371 become a & 3. */
3372
3373 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3374 && (nonzero_bits (XEXP (x, 0), mode)
3375 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3376 {
3377 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3378 goto restart;
3379 }
3380 break;
3381
3382 case MINUS:
3383 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3384 (and <foo> (const_int pow2-1)) */
3385 if (GET_CODE (XEXP (x, 1)) == AND
3386 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3387 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3388 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3389 {
3390 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3391 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3392 goto restart;
3393 }
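 /* This is the identity X - (X & -(2**N)) == X & (2**N - 1);
    e.g., X - (X & -8) == X & 7, keeping just the low three bits. */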
3394 break;
3395
3396 case MULT:
3397 /* If we have (mult (plus A B) C), apply the distributive law and then
3398 the inverse distributive law to see if things simplify. This
3399 occurs mostly in addresses, often when unrolling loops. */
3400
3401 if (GET_CODE (XEXP (x, 0)) == PLUS)
3402 {
3403 x = apply_distributive_law
3404 (gen_binary (PLUS, mode,
3405 gen_binary (MULT, mode,
3406 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3407 gen_binary (MULT, mode,
3408 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3409
3410 if (GET_CODE (x) != MULT)
3411 goto restart;
3412 }
3413
3414 /* If this is multiplication by a power of two and its first operand is
3415 a shift, treat the multiply as a shift to allow the shifts to
3416 possibly combine. */
3417 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3418 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3419 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3420 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3421 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3422 || GET_CODE (XEXP (x, 0)) == ROTATE
3423 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3424 {
3425 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3426 goto restart;
3427 }
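 /* E.g., (mult (lshiftrt X 2) 8) becomes (ashift (lshiftrt X 2) 3),
    which simplify_shift_const may then merge with the inner shift. */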
3428
3429 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3430 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3431 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3432 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3433 XEXP (XEXP (x, 0), 1));
3434 break;
3435
3436 case UDIV:
3437 /* If this is a divide by a power of two, treat it as a shift if
3438 its first operand is a shift. */
3439 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3440 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3441 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3442 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3443 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3444 || GET_CODE (XEXP (x, 0)) == ROTATE
3445 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3446 {
3447 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3448 goto restart;
3449 }
3450 break;
3451
3452 case EQ: case NE:
3453 case GT: case GTU: case GE: case GEU:
3454 case LT: case LTU: case LE: case LEU:
3455 /* If the first operand is a condition code, we can't do anything
3456 with it. */
3457 if (GET_CODE (XEXP (x, 0)) == COMPARE
3458 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3459#ifdef HAVE_cc0
3460 && XEXP (x, 0) != cc0_rtx
3461#endif
3462 ))
3463 {
3464 rtx op0 = XEXP (x, 0);
3465 rtx op1 = XEXP (x, 1);
3466 enum rtx_code new_code;
3467
3468 if (GET_CODE (op0) == COMPARE)
3469 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3470
3471 /* Simplify our comparison, if possible. */
3472 new_code = simplify_comparison (code, &op0, &op1);
3473
3474#if STORE_FLAG_VALUE == 1
3475 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3476 if only the low-order bit is possibly nonzero in X (such as when
3477 X is a ZERO_EXTRACT of one bit). Similarly, we can convert
3478 EQ to (xor X 1). Remove any ZERO_EXTRACT we made when thinking
3479 this was a comparison. It may now be simpler to use, e.g., an
3480 AND. If a ZERO_EXTRACT is indeed appropriate, it will
3481 be placed back by the call to make_compound_operation in the
3482 SET case. */
3483 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3484 && op1 == const0_rtx
3485 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3486 return gen_lowpart_for_combine (mode,
3487 expand_compound_operation (op0));
3488 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3489 && op1 == const0_rtx
3490 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3491 {
3492 op0 = expand_compound_operation (op0);
3493
3494 x = gen_rtx_combine (XOR, mode,
3495 gen_lowpart_for_combine (mode, op0),
3496 const1_rtx);
3497 goto restart;
3498 }
3499#endif
3500
3501#if STORE_FLAG_VALUE == -1
3502 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3503 to (neg x) if only the low-order bit of X can be nonzero.
3504 This converts (ne (zero_extract X 1 Y) 0) to
3505 (sign_extract X 1 Y). */
3506 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3507 && op1 == const0_rtx
3508 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3509 {
3510 op0 = expand_compound_operation (op0);
3511 x = gen_rtx_combine (NEG, mode,
3512 gen_lowpart_for_combine (mode, op0));
3513 goto restart;
3514 }
3515#endif
3516
3517 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3518 one bit that might be nonzero, we can convert (ne x 0) to
3519 (ashift x c) where C puts the bit in the sign bit. Remove any
3520 AND with STORE_FLAG_VALUE when we are done, since we are only
3521 going to test the sign bit. */
3522 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3523 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3524 && (STORE_FLAG_VALUE
3525 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3526 && op1 == const0_rtx
3527 && mode == GET_MODE (op0)
3528 && (i = exact_log2 (nonzero_bits (op0, GET_MODE (op0)))) >= 0)
3529 {
3530 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3531 expand_compound_operation (op0),
3532 GET_MODE_BITSIZE (mode) - 1 - i);
3533 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3534 return XEXP (x, 0);
3535 else
3536 return x;
3537 }
3538
3539 /* If the code changed, return a whole new comparison. */
3540 if (new_code != code)
3541 return gen_rtx_combine (new_code, mode, op0, op1);
3542
3543 /* Otherwise, keep this operation, but maybe change its operands.
3544 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3545 SUBST (XEXP (x, 0), op0);
3546 SUBST (XEXP (x, 1), op1);
3547 }
3548 break;
3549
3550 case IF_THEN_ELSE:
3551 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3552 used in it is being compared against certain values. Get the
3553 true and false comparisons and see if that says anything about the
3554 value of each arm. */
3555
3556 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3557 && reversible_comparison_p (XEXP (x, 0))
3558 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3559 {
3560 HOST_WIDE_INT nzb;
3561 rtx from = XEXP (XEXP (x, 0), 0);
3562 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3563 enum rtx_code false_code = reverse_condition (true_code);
3564 rtx true_val = XEXP (XEXP (x, 0), 1);
3565 rtx false_val = true_val;
3566 rtx true_arm = XEXP (x, 1);
3567 rtx false_arm = XEXP (x, 2);
3568 int swapped = 0;
3569
3570 /* If FALSE_CODE is EQ, swap the codes and arms. */
3571
3572 if (false_code == EQ)
3573 {
3574 swapped = 1, true_code = EQ, false_code = NE;
3575 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3576 }
3577
3578 /* If we are comparing against zero and the expression being tested
3579 has only a single bit that might be nonzero, that is its value
3580 when it is not equal to zero. Similarly if it is known to be
3581 -1 or 0. */
3582
3583 if (true_code == EQ && true_val == const0_rtx
3584 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3585 false_code = EQ, false_val = GEN_INT (nzb);
3586 else if (true_code == EQ && true_val == const0_rtx
3587 && (num_sign_bit_copies (from, GET_MODE (from))
3588 == GET_MODE_BITSIZE (GET_MODE (from))))
3589 false_code = EQ, false_val = constm1_rtx;
3590
3591 /* Now simplify an arm if we know the value of the register
3592 in the branch and it is used in the arm. Be careful due to
3593 the potential of locally-shared RTL. */
3594
3595 if (reg_mentioned_p (from, true_arm))
3596 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3597 from, true_val),
3598 pc_rtx, pc_rtx, 0, 0);
3599 if (reg_mentioned_p (from, false_arm))
3600 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3601 from, false_val),
3602 pc_rtx, pc_rtx, 0, 0);
3603
3604 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3605 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
3606 }
3607
3608 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3609 reversed, do so to avoid needing two sets of patterns for
3610 subtract-and-branch insns. Similarly if we have a constant in that
3611 position or if the third operand is the same as the first operand
3612 of the comparison. */
3613
3614 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3615 && reversible_comparison_p (XEXP (x, 0))
3616 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3617 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
3618 {
3619 SUBST (XEXP (x, 0),
3620 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3621 GET_MODE (XEXP (x, 0)),
3622 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3623
3624 temp = XEXP (x, 1);
3625 SUBST (XEXP (x, 1), XEXP (x, 2));
3626 SUBST (XEXP (x, 2), temp);
3627 }
3628
3629 /* If the two arms are identical, we don't need the comparison. */
3630
3631 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3632 && ! side_effects_p (XEXP (x, 0)))
3633 return XEXP (x, 1);
3634
3635 /* Look for cases where we have (abs x) or (neg (abs X)). */
3636
3637 if (GET_MODE_CLASS (mode) == MODE_INT
3638 && GET_CODE (XEXP (x, 2)) == NEG
3639 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3640 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3641 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3642 && ! side_effects_p (XEXP (x, 1)))
3643 switch (GET_CODE (XEXP (x, 0)))
3644 {
3645 case GT:
3646 case GE:
3647 x = gen_unary (ABS, mode, XEXP (x, 1));
3648 goto restart;
3649 case LT:
3650 case LE:
3651 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3652 goto restart;
3653 }
3654
3655 /* Look for MIN or MAX. */
3656
3657 if (GET_MODE_CLASS (mode) == MODE_INT
3658 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3659 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3660 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3661 && ! side_effects_p (XEXP (x, 0)))
3662 switch (GET_CODE (XEXP (x, 0)))
3663 {
3664 case GE:
3665 case GT:
3666 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3667 goto restart;
3668 case LE:
3669 case LT:
3670 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3671 goto restart;
3672 case GEU:
3673 case GTU:
3674 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3675 goto restart;
3676 case LEU:
3677 case LTU:
3678 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3679 goto restart;
3680 }
3681
3682 /* If we have something like (if_then_else (ne A 0) (OP X C) X),
3683 A is known to be either 0 or 1, and OP is an identity when its
3684 second operand is zero, this can be done as (OP X (mult A C)).
3685 Similarly if A is known to be 0 or -1 and also similarly if we have
3686 a ZERO_EXTEND or SIGN_EXTEND as long as X is already extended (so
3687 we don't destroy it). */
3688
3689 if (mode != VOIDmode
3690 && (GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3691 && XEXP (XEXP (x, 0), 1) == const0_rtx
3692 && (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3693 || (num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
3694 == GET_MODE_BITSIZE (mode))))
3695 {
3696 rtx nz = make_compound_operation (GET_CODE (XEXP (x, 0)) == NE
3697 ? XEXP (x, 1) : XEXP (x, 2));
3698 rtx z = GET_CODE (XEXP (x, 0)) == NE ? XEXP (x, 2) : XEXP (x, 1);
3699 rtx dir = (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3700 ? const1_rtx : constm1_rtx);
3701 rtx c = 0;
3702 enum machine_mode m = mode;
3703 enum rtx_code op, extend_op = 0;
3704
3705 if ((GET_CODE (nz) == PLUS || GET_CODE (nz) == MINUS
3706 || GET_CODE (nz) == IOR || GET_CODE (nz) == XOR
3707 || GET_CODE (nz) == ASHIFT
3708 || GET_CODE (nz) == LSHIFTRT || GET_CODE (nz) == ASHIFTRT)
3709 && rtx_equal_p (XEXP (nz, 0), z))
3710 c = XEXP (nz, 1), op = GET_CODE (nz);
3711 else if (GET_CODE (nz) == SIGN_EXTEND
3712 && (GET_CODE (XEXP (nz, 0)) == PLUS
3713 || GET_CODE (XEXP (nz, 0)) == MINUS
3714 || GET_CODE (XEXP (nz, 0)) == IOR
3715 || GET_CODE (XEXP (nz, 0)) == XOR
3716 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3717 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3718 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3719 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3720 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3721 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3722 && (num_sign_bit_copies (z, GET_MODE (z))
3723 >= (GET_MODE_BITSIZE (mode)
3724 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (nz, 0), 0))))))
3725 {
3726 c = XEXP (XEXP (nz, 0), 1);
3727 op = GET_CODE (XEXP (nz, 0));
3728 extend_op = SIGN_EXTEND;
3729 m = GET_MODE (XEXP (nz, 0));
3730 }
3731 else if (GET_CODE (nz) == ZERO_EXTEND
3732 && (GET_CODE (XEXP (nz, 0)) == PLUS
3733 || GET_CODE (XEXP (nz, 0)) == MINUS
3734 || GET_CODE (XEXP (nz, 0)) == IOR
3735 || GET_CODE (XEXP (nz, 0)) == XOR
3736 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3737 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3738 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3739 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3740 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3741 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3742 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3743 && ((nonzero_bits (z, GET_MODE (z))
3744 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (nz, 0), 0))))
3745 == 0))
3746 {
3747 c = XEXP (XEXP (nz, 0), 1);
3748 op = GET_CODE (XEXP (nz, 0));
3749 extend_op = ZERO_EXTEND;
3750 m = GET_MODE (XEXP (nz, 0));
3751 }
3752
3753 if (c && ! side_effects_p (c) && ! side_effects_p (z))
3754 {
3755 temp
3756 = gen_binary (MULT, m,
3757 gen_lowpart_for_combine (m,
3758 XEXP (XEXP (x, 0), 0)),
3759 gen_binary (MULT, m, c, dir));
3760
3761 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
3762
3763 if (extend_op != 0)
3764 temp = gen_unary (extend_op, mode, temp);
3765
3766 return temp;
3767 }
3768 }
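 /* As an illustration: with A known to be 0 or 1,
    (if_then_else (ne A 0) (plus X 4) X) becomes (plus X (mult A 4)),
    since the multiply contributes 4 when A is 1 and the identity
    value 0 when A is 0. */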
3769 break;
3770
3771 case ZERO_EXTRACT:
3772 case SIGN_EXTRACT:
3773 case ZERO_EXTEND:
3774 case SIGN_EXTEND:
3775 /* If we are processing SET_DEST, we are done. */
3776 if (in_dest)
3777 return x;
3778
3779 x = expand_compound_operation (x);
3780 if (GET_CODE (x) != code)
3781 goto restart;
3782 break;
3783
3784 case SET:
3785 /* (set (pc) (return)) gets written as (return). */
3786 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3787 return SET_SRC (x);
3788
3789 /* Convert this into a field assignment operation, if possible. */
3790 x = make_field_assignment (x);
3791
3792 /* If we are setting CC0 or if the source is a COMPARE, look for the
3793 use of the comparison result and try to simplify it unless we already
3794 have used undobuf.other_insn. */
3795 if ((GET_CODE (SET_SRC (x)) == COMPARE
3796#ifdef HAVE_cc0
3797 || SET_DEST (x) == cc0_rtx
3798#endif
3799 )
3800 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3801 &other_insn)) != 0
3802 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3803 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3804 && XEXP (*cc_use, 0) == SET_DEST (x))
3805 {
3806 enum rtx_code old_code = GET_CODE (*cc_use);
3807 enum rtx_code new_code;
3808 rtx op0, op1;
3809 int other_changed = 0;
3810 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3811
3812 if (GET_CODE (SET_SRC (x)) == COMPARE)
3813 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3814 else
3815 op0 = SET_SRC (x), op1 = const0_rtx;
3816
3817 /* Simplify our comparison, if possible. */
3818 new_code = simplify_comparison (old_code, &op0, &op1);
3819
3820#ifdef EXTRA_CC_MODES
3821 /* If this machine has CC modes other than CCmode, check to see
3822 if we need to use a different CC mode here. */
3823 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3824#endif /* EXTRA_CC_MODES */
3825
3826#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3827 /* If the mode changed, we have to change SET_DEST, the mode
3828 in the compare, and the mode in the place SET_DEST is used.
3829 If SET_DEST is a hard register, just build new versions with
3830 the proper mode. If it is a pseudo, we lose unless it is the only
3831 time we set the pseudo, in which case we can safely change
3832 its mode. */
3833 if (compare_mode != GET_MODE (SET_DEST (x)))
3834 {
3835 int regno = REGNO (SET_DEST (x));
3836 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3837
3838 if (regno < FIRST_PSEUDO_REGISTER
3839 || (reg_n_sets[regno] == 1
3840 && ! REG_USERVAR_P (SET_DEST (x))))
3841 {
3842 if (regno >= FIRST_PSEUDO_REGISTER)
3843 SUBST (regno_reg_rtx[regno], new_dest);
3844
3845 SUBST (SET_DEST (x), new_dest);
3846 SUBST (XEXP (*cc_use, 0), new_dest);
3847 other_changed = 1;
3848 }
3849 }
3850#endif
3851
3852 /* If the code changed, we have to build a new comparison
3853 in undobuf.other_insn. */
3854 if (new_code != old_code)
3855 {
3856 unsigned HOST_WIDE_INT mask;
3857
3858 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3859 SET_DEST (x), const0_rtx));
3860
3861 /* If the only change we made was to change an EQ into an
3862 NE or vice versa, OP0 has only one bit that might be nonzero,
3863 and OP1 is zero, check if changing the user of the condition
3864 code will produce a valid insn. If it won't, we can keep
3865 the original code in that insn by surrounding our operation
3866 with an XOR. */
3867
3868 if (((old_code == NE && new_code == EQ)
3869 || (old_code == EQ && new_code == NE))
3870 && ! other_changed && op1 == const0_rtx
3871 && (GET_MODE_BITSIZE (GET_MODE (op0))
3872 <= HOST_BITS_PER_WIDE_INT)
3873 && (exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0)))
3874 >= 0))
3875 {
3876 rtx pat = PATTERN (other_insn), note = 0;
3877
3878 if ((recog_for_combine (&pat, other_insn, &note) < 0
3879 && ! check_asm_operands (pat)))
3880 {
3881 PUT_CODE (*cc_use, old_code);
3882 other_insn = 0;
3883
3884 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3885 GEN_INT (mask));
3886 }
3887 }
3888
3889 other_changed = 1;
3890 }
3891
3892 if (other_changed)
3893 undobuf.other_insn = other_insn;
3894
3895#ifdef HAVE_cc0
3896 /* If we are now comparing against zero, change our source if
3897 needed. If we do not use cc0, we always have a COMPARE. */
3898 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3899 SUBST (SET_SRC (x), op0);
3900 else
3901#endif
3902
3903 /* Otherwise, if we didn't previously have a COMPARE in the
3904 correct mode, we need one. */
3905 if (GET_CODE (SET_SRC (x)) != COMPARE
3906 || GET_MODE (SET_SRC (x)) != compare_mode)
3907 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3908 op0, op1));
3909 else
3910 {
3911 /* Otherwise, update the COMPARE if needed. */
3912 SUBST (XEXP (SET_SRC (x), 0), op0);
3913 SUBST (XEXP (SET_SRC (x), 1), op1);
3914 }
3915 }
3916 else
3917 {
3918 /* Get SET_SRC in a form where we have placed back any
3919 compound expressions. Then do the checks below. */
3920 temp = make_compound_operation (SET_SRC (x), SET);
3921 SUBST (SET_SRC (x), temp);
3922 }
3923
3924 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3925 operation, and X being a REG or (subreg (reg)), we may be able to
3926 convert this to (set (subreg:m2 x) (op)).
3927
3928 We can always do this if M1 is narrower than M2 because that
3929 means that we only care about the low bits of the result.
3930
3931 However, on most machines (those with neither BYTE_LOADS_ZERO_EXTEND
3932 nor BYTE_LOADS_SIGN_EXTEND defined), we cannot perform a
3933 narrower operation than requested since the high-order bits will
3934 be undefined. On machines where BYTE_LOADS_*_EXTEND is defined,
3935 however, this transformation is safe as long as M1 and M2 have
3936 the same number of words. */
3937
3938 if (GET_CODE (SET_SRC (x)) == SUBREG
3939 && subreg_lowpart_p (SET_SRC (x))
3940 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3941 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3942 / UNITS_PER_WORD)
3943 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3944 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3945#ifndef BYTE_LOADS_EXTEND
3946 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3947 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3948#endif
3949 && (GET_CODE (SET_DEST (x)) == REG
3950 || (GET_CODE (SET_DEST (x)) == SUBREG
3951 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3952 {
3953 SUBST (SET_DEST (x),
3954 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
3955 SET_DEST (x)));
3956 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3957 }
3958
3959#ifdef BYTE_LOADS_EXTEND
3960 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3961 M wider than N, this would require a paradoxical subreg.
3962 Replace the subreg with a zero_extend to avoid the reload that
3963 would otherwise be required. */
3964
3965 if (GET_CODE (SET_SRC (x)) == SUBREG
3966 && subreg_lowpart_p (SET_SRC (x))
3967 && SUBREG_WORD (SET_SRC (x)) == 0
3968 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3969 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3970 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
3971 SUBST (SET_SRC (x), gen_rtx_combine (LOAD_EXTEND,
3972 GET_MODE (SET_SRC (x)),
3973 XEXP (SET_SRC (x), 0)));
3974#endif
3975
3976#ifndef HAVE_conditional_move
3977
3978 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
3979 and we are comparing an item known to be 0 or -1 against 0, use a
3980 logical operation instead. Check for one of the arms being an IOR
3981 of the other arm with some value. We compute three terms to be
3982 IOR'ed together. In practice, at most two will be nonzero. Then
3983 we do the IOR's. */
3984
3985 if (GET_CODE (SET_DEST (x)) != PC
3986 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
3987 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
3988 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
3989 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
3990 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
3991 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
3992 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
3993 && ! side_effects_p (SET_SRC (x)))
3994 {
3995 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3996 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
3997 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3998 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
3999 rtx term1 = const0_rtx, term2, term3;
4000
4001 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4002 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4003 else if (GET_CODE (true) == IOR
4004 && rtx_equal_p (XEXP (true, 1), false))
4005 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4006 else if (GET_CODE (false) == IOR
4007 && rtx_equal_p (XEXP (false, 0), true))
4008 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4009 else if (GET_CODE (false) == IOR
4010 && rtx_equal_p (XEXP (false, 1), true))
4011 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4012
4013 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4014 XEXP (XEXP (SET_SRC (x), 0), 0), true);
4015 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4016 gen_unary (NOT, GET_MODE (SET_SRC (x)),
4017 XEXP (XEXP (SET_SRC (x), 0), 0)),
4018 false);
4019
4020 SUBST (SET_SRC (x),
4021 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4022 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4023 term1, term2),
4024 term3));
4025 }
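 /* The terms implement the bitwise identity
    (if_then_else (ne A 0) T F) == (A & T) | (~A & F)
    for A known to be 0 or -1; TERM1 carries the operand shared
    through an IOR in one of the arms, when there is one. */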
4026#endif
4027 break;
4028
4029 case AND:
4030 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4031 {
4032 x = simplify_and_const_int (x, mode, XEXP (x, 0),
4033 INTVAL (XEXP (x, 1)));
4034
4035 /* If we have (ior (and (X C1) C2)) and the next restart would be
4036 the last, simplify this by making C1 as small as possible
4037 and then exit. */
4038 if (n_restarts >= 3 && GET_CODE (x) == IOR
4039 && GET_CODE (XEXP (x, 0)) == AND
4040 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4041 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4042 {
4043 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
4044 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
4045 & ~ INTVAL (XEXP (x, 1))));
4046 return gen_binary (IOR, mode, temp, XEXP (x, 1));
4047 }
4048
4049 if (GET_CODE (x) != AND)
4050 goto restart;
4051 }
4052
4053 /* Convert (A | B) & A to A. */
4054 if (GET_CODE (XEXP (x, 0)) == IOR
4055 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4056 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4057 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4058 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4059 return XEXP (x, 1);
4060
4061 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4062 insn (and may simplify more). */
4063 else if (GET_CODE (XEXP (x, 0)) == XOR
4064 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4065 && ! side_effects_p (XEXP (x, 1)))
4066 {
4067 x = gen_binary (AND, mode,
4068 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4069 XEXP (x, 1));
4070 goto restart;
4071 }
4072 else if (GET_CODE (XEXP (x, 0)) == XOR
4073 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4074 && ! side_effects_p (XEXP (x, 1)))
4075 {
4076 x = gen_binary (AND, mode,
4077 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4078 XEXP (x, 1));
4079 goto restart;
4080 }
4081
4082 /* Similarly for (~ (A ^ B)) & A. */
4083 else if (GET_CODE (XEXP (x, 0)) == NOT
4084 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4085 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
4086 && ! side_effects_p (XEXP (x, 1)))
4087 {
4088 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
4089 XEXP (x, 1));
4090 goto restart;
4091 }
4092 else if (GET_CODE (XEXP (x, 0)) == NOT
4093 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4094 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
4095 && ! side_effects_p (XEXP (x, 1)))
4096 {
4097 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
4098 XEXP (x, 1));
4099 goto restart;
4100 }
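 /* To verify the last rewrite bitwise: wherever a bit of A is 1,
    the same bit of (not (xor A B)) equals that bit of B, so
    ((not (xor A B)) & A) == (B & A). */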
4101
4102 /* If we have (and A B) with A not an object but that is known to
4103 be -1 or 0, this is equivalent to the expression
4104 (if_then_else (ne A (const_int 0)) B (const_int 0))
4105 We make this conversion because it may allow further
4106 simplifications and then allow use of conditional move insns.
4107 If the machine doesn't have condition moves, code in case SET
4108 will convert the IF_THEN_ELSE back to the logical operation.
4109 We build the IF_THEN_ELSE here in case further simplification
4110 is possible (e.g., we can convert it to ABS). */
4111
4112 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
4113 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4114 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
4115 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4116 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4117 {
4118 rtx op0 = XEXP (x, 0);
4119 rtx op1 = const0_rtx;
4120 enum rtx_code comp_code
4121 = simplify_comparison (NE, &op0, &op1);
4122
4123 x = gen_rtx_combine (IF_THEN_ELSE, mode,
4124 gen_binary (comp_code, VOIDmode, op0, op1),
4125 XEXP (x, 1), const0_rtx);
4126 goto restart;
4127 }
4128
4129 /* In the following group of tests (and those in case IOR below),
4130 we start with some combination of logical operations and apply
4131 the distributive law followed by the inverse distributive law.
4132 Most of the time, this results in no change. However, if some of
4133 the operands are the same or inverses of each other, simplifications
4134 will result.
4135
4136 For example, (and (ior A B) (not B)) can occur as the result of
4137 expanding a bit field assignment. When we apply the distributive
4138 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
4139 which then simplifies to (and (A (not B))). */
4140
4141 /* If we have (and (ior A B) C), apply the distributive law and then
4142 the inverse distributive law to see if things simplify. */
4143
4144 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4145 {
4146 x = apply_distributive_law
4147 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4148 gen_binary (AND, mode,
4149 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4150 gen_binary (AND, mode,
4151 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4152 if (GET_CODE (x) != AND)
4153 goto restart;
4154 }
4155
4156 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4157 {
4158 x = apply_distributive_law
4159 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4160 gen_binary (AND, mode,
4161 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4162 gen_binary (AND, mode,
4163 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4164 if (GET_CODE (x) != AND)
4165 goto restart;
4166 }
4167
4168 /* Similarly, taking advantage of the fact that
4169 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4170
4171 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4172 {
4173 x = apply_distributive_law
4174 (gen_binary (XOR, mode,
4175 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4176 XEXP (XEXP (x, 1), 0)),
4177 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4178 XEXP (XEXP (x, 1), 1))));
4179 if (GET_CODE (x) != AND)
4180 goto restart;
4181 }
4182
4183 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4184 {
4185 x = apply_distributive_law
4186 (gen_binary (XOR, mode,
4187 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4188 XEXP (XEXP (x, 0), 0)),
4189 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4190 XEXP (XEXP (x, 0), 1))));
4191 if (GET_CODE (x) != AND)
4192 goto restart;
4193 }
4194 break;
4195
4196 case IOR:
4197 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
4198 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4199 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4200 && (nonzero_bits (XEXP (x, 0), mode) & ~ INTVAL (XEXP (x, 1))) == 0)
4201 return XEXP (x, 1);
4202
4203 /* Convert (A & B) | A to A. */
4204 if (GET_CODE (XEXP (x, 0)) == AND
4205 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4206 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4207 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4208 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4209 return XEXP (x, 1);
4210
4211 /* If we have (ior (and A B) C), apply the distributive law and then
4212 the inverse distributive law to see if things simplify. */
4213
4214 if (GET_CODE (XEXP (x, 0)) == AND)
4215 {
4216 x = apply_distributive_law
4217 (gen_binary (AND, mode,
4218 gen_binary (IOR, mode,
4219 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4220 gen_binary (IOR, mode,
4221 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4222
4223 if (GET_CODE (x) != IOR)
4224 goto restart;
4225 }
4226
4227 if (GET_CODE (XEXP (x, 1)) == AND)
4228 {
4229 x = apply_distributive_law
4230 (gen_binary (AND, mode,
4231 gen_binary (IOR, mode,
4232 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4233 gen_binary (IOR, mode,
4234 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4235
4236 if (GET_CODE (x) != IOR)
4237 goto restart;
4238 }
4239
4240 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4241 mode size to (rotate A CX). */
4242
4243 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4244 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4245 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4246 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4247 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4248 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4249 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4250 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4251 == GET_MODE_BITSIZE (mode)))
4252 {
4253 rtx shift_count;
4254
4255 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4256 shift_count = XEXP (XEXP (x, 0), 1);
4257 else
4258 shift_count = XEXP (XEXP (x, 1), 1);
4259 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4260 goto restart;
4261 }
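 /* E.g., in SImode, (ior (ashift A 24) (lshiftrt A 8)) becomes
    (rotate A 24), since the two counts sum to the 32-bit size. */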
4262 break;
4263
4264 case XOR:
4265 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4266 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4267 (NOT y). */
4268 {
4269 int num_negated = 0;
4270 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4271
4272 if (GET_CODE (in1) == NOT)
4273 num_negated++, in1 = XEXP (in1, 0);
4274 if (GET_CODE (in2) == NOT)
4275 num_negated++, in2 = XEXP (in2, 0);
4276
4277 if (num_negated == 2)
4278 {
4279 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4280 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4281 }
4282 else if (num_negated == 1)
4283 {
4284 x = gen_unary (NOT, mode,
4285 gen_binary (XOR, mode, in1, in2));
4286 goto restart;
4287 }
4288 }
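 /* These use the identities (~X ^ ~Y) == (X ^ Y) and
    (~X ^ Y) == ~(X ^ Y), so at most one NOT survives. */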
4289
4290 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4291 correspond to a machine insn or result in further simplifications
4292 if B is a constant. */
4293
4294 if (GET_CODE (XEXP (x, 0)) == AND
4295 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4296 && ! side_effects_p (XEXP (x, 1)))
4297 {
4298 x = gen_binary (AND, mode,
4299 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4300 XEXP (x, 1));
4301 goto restart;
4302 }
4303 else if (GET_CODE (XEXP (x, 0)) == AND
4304 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4305 && ! side_effects_p (XEXP (x, 1)))
4306 {
4307 x = gen_binary (AND, mode,
4308 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4309 XEXP (x, 1));
4310 goto restart;
4311 }
4312
4313
4314#if STORE_FLAG_VALUE == 1
4315 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4316 comparison. */
4317 if (XEXP (x, 1) == const1_rtx
4318 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4319 && reversible_comparison_p (XEXP (x, 0)))
4320 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4321 mode, XEXP (XEXP (x, 0), 0),
4322 XEXP (XEXP (x, 0), 1));
4323
4324 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4325 is (lt foo (const_int 0)), so we can perform the above
4326 simplification. */
4327
4328 if (XEXP (x, 1) == const1_rtx
4329 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4330 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4331 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
4332 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
4333#endif
4334
4335 /* (xor (comparison foo bar) (const_int sign-bit))
4336 when STORE_FLAG_VALUE is the sign bit. */
4337 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4338 && (STORE_FLAG_VALUE
4339 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4340 && XEXP (x, 1) == const_true_rtx
4341 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4342 && reversible_comparison_p (XEXP (x, 0)))
4343 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4344 mode, XEXP (XEXP (x, 0), 0),
4345 XEXP (XEXP (x, 0), 1));
4346 break;
4347
4348 case ABS:
4349 /* (abs (neg <foo>)) -> (abs <foo>) */
4350 if (GET_CODE (XEXP (x, 0)) == NEG)
4351 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4352
4353 /* If operand is something known to be positive, ignore the ABS. */
4354 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4355 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4356 <= HOST_BITS_PER_WIDE_INT)
4357 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4358 & ((HOST_WIDE_INT) 1
4359 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4360 == 0)))
4361 return XEXP (x, 0);
4362
4363
4364 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4365 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4366 {
4367 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4368 goto restart;
4369 }
4370 break;
4371
4372 case FFS:
4373 /* (ffs (*_extend <X>)) = (ffs <X>) */
4374 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4375 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4376 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4377 break;
4378
4379 case FLOAT:
4380 /* (float (sign_extend <X>)) = (float <X>). */
4381 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4382 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4383 break;
4384
4385 case LSHIFT:
4386 case ASHIFT:
4387 case LSHIFTRT:
4388 case ASHIFTRT:
4389 case ROTATE:
4390 case ROTATERT:
4391 /* If this is a shift by a constant amount, simplify it. */
4392 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4393 {
4394 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4395 INTVAL (XEXP (x, 1)));
4396 if (GET_CODE (x) != code)
4397 goto restart;
4398 }
4399
4400#ifdef SHIFT_COUNT_TRUNCATED
4401 else if (GET_CODE (XEXP (x, 1)) != REG)
4402 SUBST (XEXP (x, 1),
4403 force_to_mode (XEXP (x, 1), GET_MODE (x),
4404 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
4405 NULL_RTX));
4406#endif
4407
4408 break;
4409 }
4410
4411 return x;
4412}
4413\f
4414/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4415 operations" because they can be replaced with two more basic operations.
4416 ZERO_EXTEND is also considered "compound" because it can be replaced with
4417 an AND operation, which is simpler, though only one operation.
4418
4419 The function expand_compound_operation is called with an rtx expression
4420 and will convert it to the appropriate shifts and AND operations,
4421 simplifying at each stage.
4422
4423 The function make_compound_operation is called to convert an expression
4424 consisting of shifts and ANDs into the equivalent compound expression.
4425 It is the inverse of this function, loosely speaking. */
4426
4427static rtx
4428expand_compound_operation (x)
4429 rtx x;
4430{
4431 int pos = 0, len;
4432 int unsignedp = 0;
4433 int modewidth;
4434 rtx tem;
4435
4436 switch (GET_CODE (x))
4437 {
4438 case ZERO_EXTEND:
4439 unsignedp = 1;
4440 case SIGN_EXTEND:
4441 /* We can't necessarily use a const_int for a multiword mode;
4442 it depends on implicitly extending the value.
4443 Since we don't know the right way to extend it,
4444 we can't tell whether the implicit way is right.
4445
4446 Even for a mode that is no wider than a const_int,
4447 we can't win, because we need to sign extend one of its bits through
4448 the rest of it, and we don't know which bit. */
4449 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4450 return x;
4451
4452 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4453 return x;
4454
4455 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4456 /* If the inner object has VOIDmode (the only way this can happen
4457 is if it is an ASM_OPERANDS), we can't do anything since we don't
4458 know how much masking to do. */
4459 if (len == 0)
4460 return x;
4461
4462 break;
4463
4464 case ZERO_EXTRACT:
4465 unsignedp = 1;
4466 case SIGN_EXTRACT:
4467 /* If the operand is a CLOBBER, just return it. */
4468 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4469 return XEXP (x, 0);
4470
4471 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4472 || GET_CODE (XEXP (x, 2)) != CONST_INT
4473 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4474 return x;
4475
4476 len = INTVAL (XEXP (x, 1));
4477 pos = INTVAL (XEXP (x, 2));
4478
4479 /* If this goes outside the object being extracted, replace the object
4480 with a (use (mem ...)) construct that only combine understands
4481 and is used only for this purpose. */
4482 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4483 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4484
4485#if BITS_BIG_ENDIAN
4486 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4487#endif
4488 break;
4489
4490 default:
4491 return x;
4492 }
4493
4494 /* If we reach here, we want to return a pair of shifts. The inner
4495 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4496 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4497 logical depending on the value of UNSIGNEDP.
4498
4499 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4500 converted into an AND of a shift.
4501
4502 We must check for the case where the left shift would have a negative
4503 count. This can happen in a case like (x >> 31) & 255 on machines
4504 that can't shift by a constant. On those machines, we would first
4505 combine the shift with the AND to produce a variable-position
4506 extraction. Then the constant of 31 would be substituted in to produce
4507 such a position. */
4508
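 /* For instance, (sign_extend:SI (reg:QI X)) expands, roughly, to
    (ashiftrt:SI (ashift:SI X 24) 24); for a ZERO_EXTEND the same
    shift pair is then simplified into (and:SI X 255). */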
4509 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4510 if (modewidth >= pos - len)
4511 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4512 GET_MODE (x),
4513 simplify_shift_const (NULL_RTX, ASHIFT,
4514 GET_MODE (x),
4515 XEXP (x, 0),
4516 modewidth - pos - len),
4517 modewidth - len);
4518
4519 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4520 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4521 simplify_shift_const (NULL_RTX, LSHIFTRT,
4522 GET_MODE (x),
4523 XEXP (x, 0), pos),
4524 ((HOST_WIDE_INT) 1 << len) - 1);
4525 else
4526 /* Any other cases we can't handle. */
4527 return x;
4528
4529
4530 /* If we couldn't do this for some reason, return the original
4531 expression. */
4532 if (GET_CODE (tem) == CLOBBER)
4533 return x;
4534
4535 return tem;
4536}
4537\f
/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGS).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */

static rtx
expand_field_assignment (x)
     rtx x;
{
  rtx inner;
  rtx pos;			/* Always counts from low bit.  */
  int len;
  rtx mask;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
	{
	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
	  pos = const0_rtx;
	}
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
	{
	  inner = XEXP (SET_DEST (x), 0);
	  len = INTVAL (XEXP (SET_DEST (x), 1));
	  pos = XEXP (SET_DEST (x), 2);

	  /* If the position is constant and spans the width of INNER,
	     surround INNER with a USE to indicate this.  */
	  if (GET_CODE (pos) == CONST_INT
	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
	    inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);

#if BITS_BIG_ENDIAN
	  if (GET_CODE (pos) == CONST_INT)
	    pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
			   - INTVAL (pos));
	  else if (GET_CODE (pos) == MINUS
		   && GET_CODE (XEXP (pos, 1)) == CONST_INT
		   && (INTVAL (XEXP (pos, 1))
		       == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
	    /* If position is ADJUST - X, new position is X.  */
	    pos = XEXP (pos, 0);
	  else
	    pos = gen_binary (MINUS, GET_MODE (pos),
			      GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
				       - len),
			      pos);
#endif
	}

      /* A SUBREG between two modes that occupy the same numbers of words
	 can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	{
	  x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
		       gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
						SET_SRC (x)));
	  continue;
	}
      else
	break;

      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
	inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len < HOST_BITS_PER_WIDE_INT)
	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      else
	break;

      /* Now compute the equivalent expression.  Make a copy of INNER
	 for the SET_DEST in case it is a MEM into which we will substitute;
	 we don't want shared RTL in that case.  */
      x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
		   gen_binary (IOR, compute_mode,
			       gen_binary (AND, compute_mode,
					   gen_unary (NOT, compute_mode,
						      gen_binary (ASHIFT,
								  compute_mode,
								  mask, pos)),
					   inner),
			       gen_binary (ASHIFT, compute_mode,
					   gen_binary (AND, compute_mode,
						       gen_lowpart_for_combine
						       (compute_mode,
							SET_SRC (x)),
						       mask),
					   pos)));
    }

  return x;
}
\f
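/* Illustrative sketch, not part of the compiler: the rewrite built above,
   expressed directly in C.  Storing VAL into a LEN-bit field of INNER at
   bit POS is the IOR of INNER with the field cleared and VAL masked and
   shifted into place.  All names here are invented for the example.  */

static unsigned int
example_field_assignment (unsigned int inner, unsigned int val,
			  int pos, int len)
{
  unsigned int mask = (1u << len) - 1;	/* LEN low-order one bits.  */

  /* (inner & ~(mask << pos)) | ((val & mask) << pos), matching the
     IOR/AND/NOT/ASHIFT tree that expand_field_assignment generates.  */
  return (inner & ~(mask << pos)) | ((val & mask) << pos);
}
\f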
/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   INNER may be a USE.  This will occur when we started with a bitfield
   that went outside the boundary of the object in memory, which is
   allowed on most machines.  To isolate this case, we produce a USE
   whose mode is wide enough and surround the MEM with it.  The only
   code that understands the USE is this routine.  If it is not removed,
   it will cause the resulting insn not to match.

   UNSIGNEDP is non-zero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is non-zero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If non-zero,
   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is non-zero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).  */

static rtx
make_extraction (mode, inner, pos, pos_rtx, len,
		 unsignedp, in_dest, in_compare)
     enum machine_mode mode;
     rtx inner;
     int pos;
     rtx pos_rtx;
     int len;
     int unsignedp;
     int in_dest, in_compare;
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_mem_mode = byte_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  int spans_byte = 0;
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;

  /* Get some information about INNER and get the innermost object.  */
  if (GET_CODE (inner) == USE)
    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
    /* We don't need to adjust the position because we set up the USE
       to pretend that it was a full-word object.  */
    spans_byte = 1, inner = XEXP (inner, 0);
  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
	 consider just the QI as the memory to extract from.
	 The subreg adds or removes high bits; its mode is
	 irrelevant to the meaning of this extraction,
	 since POS and LEN count from the lsb.  */
      if (GET_CODE (SUBREG_REG (inner)) == MEM)
	is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode.  For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  However,
     we cannot directly access the MEM if we have a USE and the underlying
     MEM is not TMODE.  This combination means that MEM was being used in a
     context where bits outside its mode were being referenced; that is only
     valid in bit-field insns.  */

  if (tmode != BLKmode
      && ! (spans_byte && inner_mode != tmode)
      && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
	   && (! in_dest
	       || (GET_CODE (inner) == REG
		   && (movstrict_optab->handlers[(int) tmode].insn_code
		       != CODE_FOR_nothing))))
	  || (GET_CODE (inner) == MEM && pos_rtx == 0
	      && (pos
		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
		     : BITS_PER_UNIT)) == 0
	      /* We can't do this if we are widening INNER_MODE (it
		 may not be aligned, for one thing).  */
	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
	      && (inner_mode == tmode
		  || (! mode_dependent_address_p (XEXP (inner, 0))
		      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
	 field.  If the original and current mode are the same, we need not
	 adjust the offset.  Otherwise, we do if bytes big endian.

	 If INNER is not a MEM, get a piece consisting of just the field
	 of interest (in this case POS must be 0).  */

      if (GET_CODE (inner) == MEM)
	{
	  int offset;
	  /* POS counts from lsb, but make OFFSET count in memory order.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
	  else
	    offset = pos / BITS_PER_UNIT;

	  new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
	}
      else if (GET_CODE (inner) == REG)
	/* We can't call gen_lowpart_for_combine here since we always want
	   a SUBREG and it would sometimes return a new hard register.  */
	new = gen_rtx (SUBREG, tmode, inner,
		       (WORDS_BIG_ENDIAN
			&& GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
			? ((GET_MODE_SIZE (inner_mode) - GET_MODE_SIZE (tmode))
			   / UNITS_PER_WORD)
			: 0));
      else
	new = force_to_mode (inner, tmode, len, NULL_RTX);

      /* If this extraction is going into the destination of a SET,
	 make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
	return (GET_CODE (new) == MEM ? new
		: (GET_CODE (new) != SUBREG
		   ? gen_rtx (CLOBBER, tmode, const0_rtx)
		   : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));

      /* Otherwise, sign- or zero-extend unless we already are in the
	 proper mode.  */

      return (mode == tmode ? new
	      : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				 mode, new));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && ! spans_byte && unsignedp)
    return 0;

  /* Get the mode to use should INNER be a MEM, the mode for the position,
     and the mode for the result.  */
#ifdef HAVE_insv
  if (in_dest)
    {
      wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
      pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
    }
#endif

#ifdef HAVE_extzv
  if (! in_dest && unsignedp)
    {
      wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
      pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
    }
#endif

#ifdef HAVE_extv
  if (! in_dest && ! unsignedp)
    {
      wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
      pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
    }
#endif

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory or we have to change the mode of memory and
     cannot, the desired mode is EXTRACTION_MODE.  */
  if (GET_CODE (inner) != MEM
      || (inner_mode != wanted_mem_mode
	  && (mode_dependent_address_p (XEXP (inner, 0))
	      || MEM_VOLATILE_P (inner))))
    wanted_mem_mode = extraction_mode;

#if BITS_BIG_ENDIAN
  /* If position is constant, compute new position.  Otherwise, build
     subtraction.  */
  if (pos_rtx == 0)
    pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
	   - len - pos);
  else
    pos_rtx
      = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
			 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
				       GET_MODE_BITSIZE (wanted_mem_mode))
				  - len),
			 pos_rtx);
#endif

  /* If INNER has a wider mode, make it smaller.  If this is a constant
     extract, try to adjust the byte to point to the byte containing
     the value.  */
  if (wanted_mem_mode != VOIDmode
      && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
      && ((GET_CODE (inner) == MEM
	   && (inner_mode == wanted_mem_mode
	       || (! mode_dependent_address_p (XEXP (inner, 0))
		   && ! MEM_VOLATILE_P (inner))))))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
	 endian in both bits and bytes or little endian in bits and bytes.
	 If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
	 adjust OFFSET to compensate.  */
#if BYTES_BIG_ENDIAN
      if (! spans_byte
	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
#endif

      /* If this is a constant position, we can move to the desired byte.  */
      if (pos_rtx == 0)
	{
	  offset += pos / BITS_PER_UNIT;
	  pos %= GET_MODE_BITSIZE (wanted_mem_mode);
	}

#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
      if (! spans_byte && is_mode != wanted_mem_mode)
	offset = (GET_MODE_SIZE (is_mode)
		  - GET_MODE_SIZE (wanted_mem_mode) - offset);
#endif

      if (offset != 0 || inner_mode != wanted_mem_mode)
	{
	  rtx newmem = gen_rtx (MEM, wanted_mem_mode,
				plus_constant (XEXP (inner, 0), offset));
	  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
	  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
	  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
	  inner = newmem;
	}
    }

  /* If INNER is not memory, we can always get it into the proper mode.  */
  else if (GET_CODE (inner) != MEM)
    inner = force_to_mode (inner, extraction_mode,
			   (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
			    : len + pos),
			   NULL_RTX);

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
  else if (pos_rtx != 0
	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
			 extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart_for_combine (mode, new);

  return new;
}
\f
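/* Illustrative sketch, not part of the compiler: the BITS_BIG_ENDIAN
   position adjustment used above.  When bit 0 is the most significant
   bit, a field that starts POS bits from the MSB starts
   WIDTH - LEN - POS bits from the LSB, and vice versa; the mapping is
   its own inverse.  The function name is invented for the example.  */

static int
example_flip_bit_position (int width, int pos, int len)
{
  /* E.g. with width 32 and len 8: an extract at MSB-relative position 0
     covers LSB-relative positions 24..31, so this returns 24.  */
  return width - len - pos;
}
\f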
/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the Vax that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */

static rtx
make_compound_operation (x, in_code)
     rtx x;
     enum rtx_code in_code;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  enum rtx_code next_code;
  int i, count;
  rtx new = 0;
  rtx tem;
  char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
		  && XEXP (x, 1) == const0_rtx) ? COMPARE
	       : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     non-zero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
    case LSHIFT:
      /* Convert shifts by constants into multiplications if inside
	 an address.  */
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (x, 0), next_code);
	  new = gen_rtx_combine (MULT, mode, new,
				 GEN_INT ((HOST_WIDE_INT) 1
					  << INTVAL (XEXP (x, 1))));
	}
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
	 with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	break;

      /* If the constant is a power of two minus one and the first operand
	 is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
				 0, in_code == COMPARE);
	}

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (x, 0))
	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
					 next_code);
	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
				 0, in_code == COMPARE);
	}

      /* If we have (and (rotate X C) M) and C is larger than the number
	 of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new,
				 (GET_MODE_BITSIZE (mode)
				  - INTVAL (XEXP (XEXP (x, 0), 1))),
				 NULL_RTX, i, 1, 0, in_code == COMPARE);
	}

      /* On machines without logical shifts, if the operand of the AND is
	 a logical shift and our mask turns off all the propagated sign
	 bits, we can replace the logical shift with an arithmetic shift.  */
      else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	       && (lshr_optab->handlers[(int) mode].insn_code
		   == CODE_FOR_nothing)
	       && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	       && mode_width <= HOST_BITS_PER_WIDE_INT)
	{
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
	    SUBST (XEXP (x, 0),
		   gen_rtx_combine (ASHIFTRT, mode,
				    make_compound_operation (XEXP (XEXP (x, 0), 0),
							     next_code),
				    XEXP (XEXP (x, 0), 1)));
	}

      /* If the constant is one less than a power of two, this might be
	 representable by an extraction even if no shift is present.
	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
	 we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
	 convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
	 arithmetic shift.  */
      if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
	  && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
	{
	  new = gen_rtx_combine (ASHIFTRT, mode,
				 make_compound_operation (XEXP (x, 0),
							  next_code),
				 XEXP (x, 1));
	  break;
	}

      /* ... fall through ... */

    case ASHIFTRT:
      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
	 this is a SIGN_EXTRACT.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new,
				 (INTVAL (XEXP (x, 1))
				  - INTVAL (XEXP (XEXP (x, 0), 1))),
				 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
				 code == LSHIFTRT, 0, in_code == COMPARE);
	}

      /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2).  In these
	 cases, we are better off returning a SIGN_EXTEND of the operation.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
	      || GET_CODE (XEXP (x, 0)) == XOR
	      || GET_CODE (XEXP (x, 0)) == PLUS)
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && 0 == (INTVAL (XEXP (XEXP (x, 0), 1))
		   & (((HOST_WIDE_INT) 1
		       << (MIN (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)),
				INTVAL (XEXP (x, 1)))
			   - 1)))))
	{
	  rtx c1 = XEXP (XEXP (XEXP (x, 0), 0), 1);
	  rtx c2 = XEXP (x, 1);
	  rtx c3 = XEXP (XEXP (x, 0), 1);
	  HOST_WIDE_INT newop1;
	  rtx inner = XEXP (XEXP (XEXP (x, 0), 0), 0);

	  /* If C1 > C2, INNER needs to have the shift performed on it
	     for C1-C2 bits.  */
	  if (INTVAL (c1) > INTVAL (c2))
	    {
	      inner = gen_binary (ASHIFT, mode, inner,
				  GEN_INT (INTVAL (c1) - INTVAL (c2)));
	      c1 = c2;
	    }

	  newop1 = INTVAL (c3) >> INTVAL (c1);
	  new = make_compound_operation (inner,
					 GET_CODE (XEXP (x, 0)) == PLUS
					 ? MEM : GET_CODE (XEXP (x, 0)));
	  new = make_extraction (mode,
				 gen_binary (GET_CODE (XEXP (x, 0)), mode, new,
					     GEN_INT (newop1)),
				 INTVAL (c2) - INTVAL (c1),
				 NULL_RTX, mode_width - INTVAL (c1),
				 code == LSHIFTRT, 0, in_code == COMPARE);
	}

      /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2).  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 0)) == NEG
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
	{
	  new = make_compound_operation (XEXP (XEXP (XEXP (x, 0), 0), 0),
					 next_code);
	  new = make_extraction (mode,
				 gen_unary (GET_CODE (XEXP (x, 0)), mode,
					    new, 0),
				 (INTVAL (XEXP (x, 1))
				  - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
				 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
				 code == LSHIFTRT, 0, in_code == COMPARE);
	}
      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
	 narrowing the object and it has a different RTL code from
	 what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);
      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
	  && subreg_lowpart_p (x))
	{
	  rtx newer = force_to_mode (tem, mode,
				     GET_MODE_BITSIZE (mode), NULL_RTX);

	  /* If we have something other than a SUBREG, we might have
	     done an expansion, so rerun ourselves.  */
	  if (GET_CODE (newer) != SUBREG)
	    newer = make_compound_operation (newer, in_code);

	  return newer;
	}
    }

  if (new)
    {
      x = gen_lowpart_for_combine (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = make_compound_operation (XEXP (x, i), next_code);
	SUBST (XEXP (x, i), new);
      }

  return x;
}
\f
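/* Illustrative sketch, not part of the compiler: the kind of equivalence
   make_compound_operation recognizes.  An AND of a logical right shift
   with a power-of-two-minus-one mask is exactly a zero-extended bit-field
   extract, which is what a (zero_extract ...) rtx expresses.  The name
   is invented for the example.  */

static unsigned int
example_compound_extract (unsigned int x)
{
  /* (and (lshiftrt x 3) 15): mask 15 = 2**4 - 1, so this is a 4-bit
     zero-extract at position 3, i.e. (zero_extract:SI x 4 3).  */
  return (x >> 3) & 15;
}
\f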
/* Given M see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */

static int
get_pos_from_mask (m, plen)
     unsigned HOST_WIDE_INT m;
     int *plen;
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & - m);

  if (pos < 0)
    return -1;

  /* Now shift off the low-order zero bits and see if we have a power of
     two minus 1.  */
  *plen = exact_log2 ((m >> pos) + 1);

  if (*plen <= 0)
    return -1;

  return pos;
}
\f
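/* Illustrative sketch, not part of the compiler: what get_pos_from_mask
   computes, without relying on exact_log2.  M & -M isolates the lowest
   set bit of M (two's complement negation flips every bit above it),
   giving the field position; shifting M down by that amount must then
   leave a contiguous run of one bits, i.e. a power of two minus one,
   whose length is the field length.  For example, M = 0x0f0 yields
   position 4 and length 4, while M = 0x0f1 is rejected because
   0x0f1 is not a shifted run of ones.  The name is invented.  */

static int
example_pos_from_mask (unsigned int m, int *plen)
{
  unsigned int low;
  int pos = 0, len = 0;

  if (m == 0)
    return -1;
  low = m & - m;		/* Lowest set bit, e.g. 0x010 for 0x0f0.  */
  while ((low >>= 1) != 0)
    pos++;
  m >>= pos;
  while (m & 1)
    m >>= 1, len++;
  if (m != 0)			/* The one bits were not contiguous.  */
    return -1;
  *plen = len;
  return pos;
}
\f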
/* Rewrite X so that it is an expression in MODE.  We only care about the
   low-order BITS bits so we can ignore AND operations that just clear
   higher-order bits.

   Also, if REG is non-zero and X is a register equal in value to REG,
   replace X with REG.  */

static rtx
force_to_mode (x, mode, bits, reg)
     rtx x;
     enum machine_mode mode;
     int bits;
     rtx reg;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode op_mode = mode;

  /* If X is narrower than MODE or if BITS is larger than the size of MODE,
     just get X in the proper mode.  */

  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      || bits > GET_MODE_BITSIZE (mode))
    return gen_lowpart_for_combine (mode, x);

  switch (code)
    {
    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
	return force_to_mode (x, mode, bits, reg);
      break;

    case REG:
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
		       || rtx_equal_p (reg, get_last_value (x))))
	x = reg;
      break;

    case CONST_INT:
      if (bits < HOST_BITS_PER_WIDE_INT)
	x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
      return x;

    case SUBREG:
      /* Ignore low-order SUBREGs.  */
      if (subreg_lowpart_p (x))
	return force_to_mode (SUBREG_REG (x), mode, bits, reg);
      break;

    case AND:
      /* If this is an AND with a constant, handle it specially here;
	 otherwise, fall through to the general binary case.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
	  int len = exact_log2 (mask + 1);
	  rtx op = XEXP (x, 0);

	  /* If this is masking some low-order bits, we may be able to
	     impose a stricter constraint on what bits of the operand are
	     required.  */

	  op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
			      reg);

	  if (bits < HOST_BITS_PER_WIDE_INT)
	    mask &= ((HOST_WIDE_INT) 1 << bits) - 1;

	  /* If we have no AND in MODE, use the original mode for the
	     operation.  */

	  if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
	    op_mode = GET_MODE (x);

	  x = simplify_and_const_int (x, op_mode, op, mask);

	  /* If X is still an AND, see if it is an AND with a mask that
	     is just some low-order bits.  If so, and it is BITS wide (it
	     can't be wider), we don't need it.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && bits < HOST_BITS_PER_WIDE_INT
	      && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
	    x = XEXP (x, 0);

	  break;
	}

      /* ... fall through ... */

    case PLUS:
    case MINUS:
    case MULT:
    case IOR:
    case XOR:
      /* For most binary operations, just propagate into the operation and
	 change the mode if we have an operation of that mode.  */

      if ((code == PLUS
	   && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
	  || (code == MINUS
	      && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
	  || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
			       == CODE_FOR_nothing))
	  || (code == AND
	      && and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
	  || (code == IOR
	      && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
	  || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
			      == CODE_FOR_nothing)))
	op_mode = GET_MODE (x);

      x = gen_binary (code, op_mode,
		      gen_lowpart_for_combine (op_mode,
					       force_to_mode (XEXP (x, 0),
							      mode, bits,
							      reg)),
		      gen_lowpart_for_combine (op_mode,
					       force_to_mode (XEXP (x, 1),
							      mode, bits,
							      reg)));
      break;

    case ASHIFT:
    case LSHIFT:
      /* For left shifts, do the same, but just for the first operand.
	 However, we cannot do anything with shifts where we cannot
	 guarantee that the counts are smaller than the size of the mode
	 because such a count will have a different meaning in a
	 wider mode.

	 If we can narrow the shift and know the count, we need even fewer
	 bits of the first operand.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
	break;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
	bits -= INTVAL (XEXP (x, 1));

      if ((code == ASHIFT
	   && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
	  || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
				 == CODE_FOR_nothing)))
	op_mode = GET_MODE (x);

      x = gen_binary (code, op_mode,
		      gen_lowpart_for_combine (op_mode,
					       force_to_mode (XEXP (x, 0),
							      mode, bits,
							      reg)),
		      XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant and
	 the count plus BITS is no larger than the width of MODE.  In that
	 case, we can do the shift in MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
	{
	  rtx inner = force_to_mode (XEXP (x, 0), mode,
				     bits + INTVAL (XEXP (x, 1)), reg);

	  if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
	    op_mode = GET_MODE (x);

	  x = gen_binary (LSHIFTRT, op_mode,
			  gen_lowpart_for_combine (op_mode, inner),
			  XEXP (x, 1));
	}
      break;

    case ASHIFTRT:
      /* If this is a sign-extension operation that just affects bits
	 we don't care about, remove it.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
	  && GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
      break;

    case NEG:
    case NOT:
      if ((code == NEG
	   && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
	  || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
			      == CODE_FOR_nothing)))
	op_mode = GET_MODE (x);

      /* Handle these similarly to the way we handle most binary operations.  */
      x = gen_unary (code, op_mode,
		     gen_lowpart_for_combine (op_mode,
					      force_to_mode (XEXP (x, 0), mode,
							     bits, reg)));
      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
	 written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 1), mode,
						     bits, reg)));
      SUBST (XEXP (x, 2),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 2), mode,
						     bits, reg)));
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_for_combine (mode, x);
}
\f
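/* Illustrative sketch, not part of the compiler: the arithmetic fact
   force_to_mode exploits.  If only the low BITS bits of a result are
   needed, the operands of PLUS, MINUS, MULT, AND, IOR and XOR can
   themselves be truncated to BITS bits first without changing those low
   bits, because no carry propagates downward.  Names are invented.  */

static unsigned char
example_force_to_mode (unsigned int a, unsigned int b)
{
  /* Truncating the sum of truncated operands equals truncating the
     full-width sum: a narrower add is just as good here.  */
  unsigned char narrow = (unsigned char) ((unsigned char) a
					  + (unsigned char) b);
  unsigned char wide = (unsigned char) (a + b);

  return narrow == wide ? narrow : 0;	/* Always takes the first arm.  */
}
\f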
/* Return the value of expression X given the fact that condition COND
   is known to be true when applied to REG as its first operand and VAL
   as its second.  X is known to not be shared and so can be modified in
   place.

   We only handle the simplest cases, and specifically those cases that
   arise with IF_THEN_ELSE expressions.  */

static rtx
known_cond (x, cond, reg, val)
     rtx x;
     enum rtx_code cond;
     rtx reg, val;
{
  enum rtx_code code = GET_CODE (x);
  rtx new, temp;
  char *fmt;
  int i, j;

  if (side_effects_p (x))
    return x;

  if (cond == EQ && rtx_equal_p (x, reg))
    return val;

  /* If X is (abs REG) and we know something about REG's relationship
     with zero, we may be able to simplify this.  */

  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
    switch (cond)
      {
      case GE:  case GT:  case EQ:
	return XEXP (x, 0);
      case LT:  case LE:
	return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
      }

  /* The only other cases we handle are MIN, MAX, and comparisons if the
     operands are the same as REG and VAL.  */

  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      if (rtx_equal_p (XEXP (x, 0), val))
	cond = swap_condition (cond), temp = val, val = reg, reg = temp;

      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
	{
	  if (GET_RTX_CLASS (code) == '<')
	    return (comparison_dominates_p (cond, code) ? const_true_rtx
		    : (comparison_dominates_p (cond,
					       reverse_condition (code))
		       ? const0_rtx : x));

	  else if (code == SMAX || code == SMIN
		   || code == UMIN || code == UMAX)
	    {
	      int unsignedp = (code == UMIN || code == UMAX);

	      if (code == SMAX || code == UMAX)
		cond = reverse_condition (cond);

	      switch (cond)
		{
		case GE:   case GT:
		  return unsignedp ? x : XEXP (x, 1);
		case LE:   case LT:
		  return unsignedp ? x : XEXP (x, 0);
		case GEU:  case GTU:
		  return unsignedp ? XEXP (x, 1) : x;
		case LEU:  case LTU:
		  return unsignedp ? XEXP (x, 0) : x;
		}
	    }
	}
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
						cond, reg, val));
    }

  return x;
}
\f
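/* Illustrative sketch, not part of the compiler: the kind of reasoning
   known_cond performs, in plain C.  Once the guard of a conditional has
   established how A compares to B, min/max expressions in the guarded
   arm collapse to one of their operands.  Names are invented.  */

static int
example_known_cond (int a, int b)
{
  if (a >= b)
    /* Under the known condition A >= B, (smin A B) is just B, so the
       combiner can rewrite the arm without computing the minimum.  */
    return b;			/* Stands for min (a, b).  */
  else
    return a;			/* Under A < B, min (a, b) is A.  */
}
\f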
/* See if X, a SET operation, can be rewritten as a bit-field assignment.
   Return that assignment if so.

   We only handle the most common cases.  */

static rtx
make_field_assignment (x)
     rtx x;
{
  rtx dest = SET_DEST (x);
  rtx src = SET_SRC (x);
  rtx ourdest;
  rtx assign;
  HOST_WIDE_INT c1;
  int pos, len;
  rtx other;
  enum machine_mode mode;

  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
     a clear of a one-bit field.  We will have changed it to
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
     for a SUBREG.  */

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
      && (rtx_equal_p (dest, XEXP (src, 1))
	  || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
	  || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
				1, 1, 1, 0);
      return gen_rtx (SET, VOIDmode, assign, const0_rtx);
    }

  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
	   && subreg_lowpart_p (XEXP (src, 0))
	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
	   && (rtx_equal_p (dest, XEXP (src, 1))
	       || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
	       || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
    {
      assign = make_extraction (VOIDmode, dest, 0,
				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
				1, 1, 1, 0);
      return gen_rtx (SET, VOIDmode, assign, const0_rtx);
    }

  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
     one-bit field.  */
  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
	   && XEXP (XEXP (src, 0), 0) == const1_rtx
	   && (rtx_equal_p (dest, XEXP (src, 1))
	       || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
	       || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
				1, 1, 1, 0);
      return gen_rtx (SET, VOIDmode, assign, const1_rtx);
    }

  /* The other case we handle is assignments into a constant-position
     field.  They look like (ior (and DEST C1) OTHER).  If C1 represents
     a mask that has all one bits except for a group of zero bits and
     OTHER is known to have zeros where C1 has ones, this is such an
     assignment.  Compute the position and length from C1.  Shift OTHER
     to the appropriate position, force it to the required mode, and
     make the extraction.  Check for the AND in both operands.  */

  if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
      && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
      && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
	  || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
	  || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
    c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
	   && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
	   && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
	       || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
	       || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
			       dest)))
    c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
  else
    return x;

  pos = get_pos_from_mask (~c1, &len);
  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
      || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
	  && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
    return x;

  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);

  /* The mode to use for the source is the mode of the assignment, or of
     what is inside a possible STRICT_LOW_PART.  */
  mode = (GET_CODE (assign) == STRICT_LOW_PART
	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));

  /* Shift OTHER right POS places and make it the source, restricting it
     to the proper length and mode.  */

  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
					     GET_MODE (src), other, pos),
		       mode, len, dest);

  return gen_rtx_combine (SET, VOIDmode, assign, src);
}
\f
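/* Illustrative sketch, not part of the compiler: the source-level idioms
   make_field_assignment recognizes.  Clearing or setting one bit of DEST
   compiles to the AND/IOR patterns tested above, and both are really
   stores of a constant into a one-bit field at position POS.  Names are
   invented for the example.  */

static unsigned int
example_clear_bit (unsigned int dest, int pos)
{
  /* (and (not (ashift 1 POS)) DEST): a one-bit zero_extract of DEST
     receives 0.  */
  return dest & ~(1u << pos);
}

static unsigned int
example_set_bit (unsigned int dest, int pos)
{
  /* (ior (ashift 1 POS) DEST): the same one-bit field receives 1.  */
  return dest | (1u << pos);
}
\f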
/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
   if so.  */

static rtx
apply_distributive_law (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  rtx lhs, rhs, other;
  rtx tem;
  enum rtx_code inner_code;

  /* Distributivity is not true for floating point.
     It can change the value.  So don't do it.
     -- rms and moshier@world.std.com.  */
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    return x;

  /* The outer operation can only be one of the following:  */
  if (code != IOR && code != AND && code != XOR
      && code != PLUS && code != MINUS)
    return x;

  lhs = XEXP (x, 0), rhs = XEXP (x, 1);

  /* If either operand is a primitive we can't do anything, so get out fast.  */
  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
    return x;

  lhs = expand_compound_operation (lhs);
  rhs = expand_compound_operation (rhs);
  inner_code = GET_CODE (lhs);
  if (inner_code != GET_CODE (rhs))
    return x;

  /* See if the inner and outer operations distribute.  */
  switch (inner_code)
    {
    case LSHIFTRT:
    case ASHIFTRT:
    case AND:
    case IOR:
      /* These all distribute except over PLUS.  */
      if (code == PLUS || code == MINUS)
	return x;
      break;

    case MULT:
      if (code != PLUS && code != MINUS)
	return x;
      break;

    case ASHIFT:
    case LSHIFT:
      /* These are also multiplies, so they distribute over everything.  */
      break;

    case SUBREG:
      /* Non-paradoxical SUBREGs distribute over all operations, provided
	 the inner modes and word numbers are the same, this is an extraction
	 of a low-order part, we don't convert an fp operation to int or
	 vice versa, and we would not be converting a single-word
	 operation into a multi-word operation.  The latter test is not
	 required, but it prevents generating unneeded multi-word operations.
	 Some of the previous tests are redundant given the latter test, but
	 are retained because they are required for correctness.

	 We produce the result slightly differently in this case.  */

      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
	  || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
	  || ! subreg_lowpart_p (lhs)
	  || (GET_MODE_CLASS (GET_MODE (lhs))
	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
	  || (GET_MODE_SIZE (GET_MODE (lhs))
	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
	return x;

      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
			SUBREG_REG (lhs), SUBREG_REG (rhs));
      return gen_lowpart_for_combine (GET_MODE (x), tem);

    default:
      return x;
    }

  /* Set LHS and RHS to the inner operands (A and B in the example
     above) and set OTHER to the common operand (C in the example).
     There is only one way to do this unless the inner operation is
     commutative.  */
  if (GET_RTX_CLASS (inner_code) == 'c'
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
  else if (GET_RTX_CLASS (inner_code) == 'c'
	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
  else if (GET_RTX_CLASS (inner_code) == 'c'
	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
  else
    return x;

  /* Form the new inner operation, seeing if it simplifies first.  */
  tem = gen_binary (code, GET_MODE (x), lhs, rhs);

  /* There is one exception to the general way of distributing:
     (a | b) ^ (a | c) -> (b ^ c) & ~a  */
  if (code == XOR && inner_code == IOR)
    {
      inner_code = AND;
      other = gen_unary (NOT, GET_MODE (x), other);
    }

  /* We may be able to continue distributing the result, so call
     ourselves recursively on the inner operation before forming the
     outer operation, which we return.  */
  return gen_binary (inner_code, GET_MODE (x),
		     apply_distributive_law (tem), other);
}
\f
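/* Illustrative sketch, not part of the compiler: the transformation
   apply_distributive_law performs, shown on integers.  Factoring the
   common operand C out of (a * c) + (b * c) trades two multiplies for
   one, which is why combine prefers the factored form.  The name is
   invented for the example.  */

static int
example_distribute (int a, int b, int c)
{
  /* (plus (mult a c) (mult b c)) becomes (mult (plus a b) c).  */
  return (a + b) * c;
}
\f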
/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.

   Return an equivalent form, if different from X.  Otherwise, return X.  If
   X is zero, we are to always construct the equivalent form.  */

static rtx
simplify_and_const_int (x, mode, varop, constop)
     rtx x;
     enum machine_mode mode;
     rtx varop;
     unsigned HOST_WIDE_INT constop;
{
  register enum machine_mode tmode;
  register rtx temp;
  unsigned HOST_WIDE_INT nonzero;

  /* There is a large class of optimizations based on the principle that
     some operations produce results where certain bits are known to be zero,
     and hence are not significant to the AND.  For example, if we have just
     done a left shift of one bit, the low-order bit is known to be zero and
     hence an AND with a mask of ~1 would not do anything.

     At the end of the following loop, we set:

	 VAROP to be the item to be AND'ed with;
	 CONSTOP to the constant value to AND it with.  */

  while (1)
    {
      /* If we ever encounter a mode wider than the host machine's widest
	 integer size, we can't compute the masks accurately, so give up.  */
      if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
	break;

      /* Unless one of the cases below does a `continue',
	 a `break' will be executed to exit the loop.  */

      switch (GET_CODE (varop))
	{
	case CLOBBER:
	  /* If VAROP is a (clobber (const_int)), return it since we know
	     we are generating something that won't match.  */
	  return varop;

#if ! BITS_BIG_ENDIAN
	case USE:
	  /* VAROP is a (use (mem ..)) that was made from a bit-field
	     extraction that spanned the boundary of the MEM.  If we are
	     now masking so it is within that boundary, we don't need the
	     USE any more.  */
	  if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }
	  break;
#endif

	case SUBREG:
	  if (subreg_lowpart_p (varop)
	      /* We can ignore the effect of this SUBREG if it narrows the
		 mode or, on machines where byte operations extend, if the
		 constant masks to zero all the bits the mode doesn't have.  */
	      && ((GET_MODE_SIZE (GET_MODE (varop))
		   < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
#ifdef BYTE_LOADS_EXTEND
		  || (0 == (constop
			    & GET_MODE_MASK (GET_MODE (varop))
			    & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
#endif
		  ))
	    {
	      varop = SUBREG_REG (varop);
	      continue;
	    }
	  break;

	case ZERO_EXTRACT:
	case SIGN_EXTRACT:
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	  /* Try to expand these into a series of shifts and then work
	     with that result.  If we can't, for example, if the extract
	     isn't at a fixed position, give up.  */
	  temp = expand_compound_operation (varop);
	  if (temp != varop)
	    {
	      varop = temp;
	      continue;
	    }
	  break;

	case AND:
	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
	    {
	      constop &= INTVAL (XEXP (varop, 1));
	      varop = XEXP (varop, 0);
	      continue;
	    }
	  break;

	case IOR:
	case XOR:
	  /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR
	     and LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
	     operation which may be a bitfield extraction.  */

	  if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
	      && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
	      && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && ((INTVAL (XEXP (varop, 1))
		   & ~ nonzero_bits (XEXP (varop, 0), GET_MODE (varop))) == 0))
	    {
	      temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
			      << INTVAL (XEXP (XEXP (varop, 0), 1)));
	      temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
				 XEXP (XEXP (varop, 0), 0), temp);
	      varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
				       temp, XEXP (XEXP (varop, 0), 1));
	      continue;
	    }

	  /* Apply the AND to both branches of the IOR or XOR, then try to
	     apply the distributive law.  This may eliminate operations
	     if either branch can be simplified because of the AND.
	     It may also make some cases more complex, but those cases
	     probably won't match a pattern either with or without this.  */
	  return
	    gen_lowpart_for_combine
	      (mode, apply_distributive_law
	       (gen_rtx_combine
		(GET_CODE (varop), GET_MODE (varop),
		 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
					 XEXP (varop, 0), constop),
		 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
					 XEXP (varop, 1), constop))));

	case NOT:
	  /* (and (not FOO) CONST) is (and (xor FOO CONST) CONST), so if FOO
	     is an LSHIFTRT we can do the same as above.  */

	  if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
	      && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
	      && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
	    {
	      temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
	      temp = gen_binary (XOR, GET_MODE (varop),
				 XEXP (XEXP (varop, 0), 0), temp);
	      varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
				       temp, XEXP (XEXP (varop, 0), 1));
	      continue;
	    }
	  break;

	case ASHIFTRT:
	  /* If we are just looking for the sign bit, we don't need this
	     shift at all, even if it has a variable count.  */
	  if (constop == ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }

	  /* If this is a shift by a constant, get a mask that contains
	     those bits that are not copies of the sign bit.  We then have
	     two cases:  If CONSTOP only includes those bits, this can be
	     a logical shift, which may allow simplifications.  If CONSTOP
	     is a single-bit field not within those bits, we are requesting
	     a copy of the sign bit and hence can shift the sign bit to
	     the appropriate location.  */
	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && INTVAL (XEXP (varop, 1)) >= 0
	      && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
	    {
	      int i = -1;

	      nonzero = GET_MODE_MASK (GET_MODE (varop));
	      nonzero >>= INTVAL (XEXP (varop, 1));

	      if ((constop & ~ nonzero) == 0
		  || (i = exact_log2 (constop)) >= 0)
		{
		  varop = simplify_shift_const
		    (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
		     i < 0 ? INTVAL (XEXP (varop, 1))
		     : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
		  if (GET_CODE (varop) != ASHIFTRT)
		    continue;
		}
	    }

	  /* If our mask is 1, convert this to a LSHIFTRT.  This can be done
	     even if the shift count isn't a constant.  */
	  if (constop == 1)
	    varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
				     XEXP (varop, 0), XEXP (varop, 1));
	  break;

	case LSHIFTRT:
	  /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
	     shift and AND produces only copies of the sign bit (C2 is one less
	     than a power of two), we can do this with just a shift.  */

	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && ((INTVAL (XEXP (varop, 1))
		   + num_sign_bit_copies (XEXP (varop, 0),
					  GET_MODE (XEXP (varop, 0))))
		  >= GET_MODE_BITSIZE (GET_MODE (varop)))
	      && exact_log2 (constop + 1) >= 0)
	    varop
	      = gen_rtx_combine (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
				 GEN_INT (GET_MODE_BITSIZE (GET_MODE (varop))
					  - exact_log2 (constop + 1)));
	  break;

	case NE:
	  /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
	     included in STORE_FLAG_VALUE and FOO has no bits that might be
	     nonzero not in CONST.  */
	  if ((constop & ~ STORE_FLAG_VALUE) == 0
	      && XEXP (varop, 1) == const0_rtx
	      && (nonzero_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }
	  break;

	case PLUS:
	  /* In (and (plus FOO C1) M), if M is a mask that just turns off
	     low-order bits (as in an alignment operation) and FOO is already
	     aligned to that boundary, we can remove this AND
	     and possibly the PLUS if it is now adding zero.  */
	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && exact_log2 (-constop) >= 0
	      && (nonzero_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
	    {
	      varop = plus_constant (XEXP (varop, 0),
				     INTVAL (XEXP (varop, 1)) & constop);
	      constop = ~0;
	      break;
	    }

	  /* ... fall through ... */

	case MINUS:
	  /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
	     less than powers of two and M2 is narrower than M1, we can
	     eliminate the inner AND.  This occurs when incrementing
	     bit fields.  */

	  if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
	      || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
	    SUBST (XEXP (varop, 0),
		   expand_compound_operation (XEXP (varop, 0)));

	  if (GET_CODE (XEXP (varop, 0)) == AND
	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
	      && exact_log2 (constop + 1) >= 0
	      && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
	      && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
	    SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
	  break;
	}

      break;
    }
6089
6090 /* If we have reached a constant, this whole thing is constant. */
6091 if (GET_CODE (varop) == CONST_INT)
6092 return GEN_INT (constop & INTVAL (varop));
6093
6094 /* See what bits may be nonzero in VAROP. Unlike the general case of
6095 a call to nonzero_bits, here we don't care about bits outside
6096 MODE. */
6097
6098 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
6099
6100 /* Turn off all bits in the constant that are known to already be zero.
6101 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
6102 which is tested below. */
6103
6104 constop &= nonzero;
6105
6106 /* If we don't have any bits left, return zero. */
6107 if (constop == 0)
6108 return const0_rtx;
6109
6110 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6111 if we already had one (just check for the simplest cases). */
6112 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6113 && GET_MODE (XEXP (x, 0)) == mode
6114 && SUBREG_REG (XEXP (x, 0)) == varop)
6115 varop = XEXP (x, 0);
6116 else
6117 varop = gen_lowpart_for_combine (mode, varop);
6118
6119 /* If we can't make the SUBREG, try to return what we were given. */
6120 if (GET_CODE (varop) == CLOBBER)
6121 return x ? x : varop;
6122
6123 /* If we are only masking insignificant bits, return VAROP. */
6124 if (constop == nonzero)
6125 x = varop;
6126
6127 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6128 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6129 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
6130
6131 else
6132 {
6133 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6134 || INTVAL (XEXP (x, 1)) != constop)
6135 SUBST (XEXP (x, 1), GEN_INT (constop));
6136
6137 SUBST (XEXP (x, 0), varop);
6138 }
6139
6140 return x;
6141}
6142\f
6143/* Given an expression, X, compute which bits in X can be non-zero.
6144 We don't care about bits outside of those defined in MODE.
6145
6146 For most X this is simply GET_MODE_MASK (MODE), but if X is
6147 a shift, AND, or zero_extract, we can do better. */
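/* Illustrative examples (added, not original commentary): in SImode,
   nonzero_bits of (and X 15) is at most 15, of (lshiftrt X 28) at most 15,
   and of (zero_extend:SI (reg:QI)) at most 255.  Callers may rely only on
   the guarantee that every possibly-nonzero bit is included.  */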
6148
6149static unsigned HOST_WIDE_INT
6150nonzero_bits (x, mode)
6151 rtx x;
6152 enum machine_mode mode;
6153{
6154 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6155 unsigned HOST_WIDE_INT inner_nz;
6156 enum rtx_code code;
6157 int mode_width = GET_MODE_BITSIZE (mode);
6158 rtx tem;
6159
6160 /* If X is wider than MODE, use its mode instead. */
6161 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6162 {
6163 mode = GET_MODE (x);
6164 nonzero = GET_MODE_MASK (mode);
6165 mode_width = GET_MODE_BITSIZE (mode);
6166 }
6167
6168 if (mode_width > HOST_BITS_PER_WIDE_INT)
6169 /* Our only callers in this case look for single bit values. So
6170 just return the mode mask. Those tests will then be false. */
6171 return nonzero;
6172
6173 code = GET_CODE (x);
6174 switch (code)
6175 {
6176 case REG:
6177#ifdef STACK_BOUNDARY
6178 /* If this is the stack pointer, we may know something about its
6179 alignment. If PUSH_ROUNDING is defined, it is possible for the
6180 stack to be momentarily aligned only to that amount, so we pick
6181 the least alignment. */
6182
6183 if (x == stack_pointer_rtx)
6184 {
6185 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6186
6187#ifdef PUSH_ROUNDING
6188 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6189#endif
6190
6191 return nonzero & ~ (sp_alignment - 1);
6192 }
6193#endif
6194
6195 /* If X is a register whose nonzero bits value is current, use it.
6196 Otherwise, if X is a register whose value we can find, use that
6197 value. Otherwise, use the previously-computed global nonzero bits
6198 for this register. */
6199
6200 if (reg_last_set_value[REGNO (x)] != 0
6201 && reg_last_set_mode[REGNO (x)] == mode
6202 && (reg_n_sets[REGNO (x)] == 1
6203 || reg_last_set_label[REGNO (x)] == label_tick)
6204 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6205 return reg_last_set_nonzero_bits[REGNO (x)];
6206
6207 tem = get_last_value (x);
6208 if (tem)
6209 return nonzero_bits (tem, mode);
6210 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6211 return reg_nonzero_bits[REGNO (x)] & nonzero;
6212 else
6213 return nonzero;
6214
6215 case CONST_INT:
6216 return INTVAL (x);
6217
6218#ifdef BYTE_LOADS_ZERO_EXTEND
6219 case MEM:
6220 /* In many, if not most, RISC machines, reading a byte from memory
6221 zeros the rest of the register. Noticing that fact saves a lot
6222 of extra zero-extends. */
6223 nonzero &= GET_MODE_MASK (GET_MODE (x));
6224 break;
6225#endif
6226
6227#if STORE_FLAG_VALUE == 1
6228 case EQ: case NE:
6229 case GT: case GTU:
6230 case LT: case LTU:
6231 case GE: case GEU:
6232 case LE: case LEU:
6233
6234 if (GET_MODE_CLASS (mode) == MODE_INT)
6235 nonzero = 1;
6236
6237 /* A comparison operation only sets the bits given by its mode. The
6238 rest are set undefined. */
6239 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6240 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6241 break;
6242#endif
6243
6244 case NEG:
6245 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6246 == GET_MODE_BITSIZE (GET_MODE (x)))
6247 nonzero = 1;
6248
6249 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6250 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6251 break;
6252
6253 case ABS:
6254 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6255 == GET_MODE_BITSIZE (GET_MODE (x)))
6256 nonzero = 1;
6257 break;
6258
6259 case TRUNCATE:
6260 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6261 break;
6262
6263 case ZERO_EXTEND:
6264 nonzero &= nonzero_bits (XEXP (x, 0), mode);
6265 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6266 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6267 break;
6268
6269 case SIGN_EXTEND:
6270 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6271 Otherwise, show all the bits in the outer mode but not the inner
6272 may be non-zero. */
6273 inner_nz = nonzero_bits (XEXP (x, 0), mode);
6274 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6275 {
6276 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6277 if (inner_nz &
6278 (((HOST_WIDE_INT) 1
6279 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6280 inner_nz |= (GET_MODE_MASK (mode)
6281 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6282 }
6283
6284 nonzero &= inner_nz;
6285 break;
6286
6287 case AND:
6288 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6289 & nonzero_bits (XEXP (x, 1), mode));
6290 break;
6291
6292 case XOR: case IOR:
6293 case UMIN: case UMAX: case SMIN: case SMAX:
6294 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6295 | nonzero_bits (XEXP (x, 1), mode));
6296 break;
6297
6298 case PLUS: case MINUS:
6299 case MULT:
6300 case DIV: case UDIV:
6301 case MOD: case UMOD:
6302 /* We can apply the rules of arithmetic to compute the number of
6303 high- and low-order zero bits of these operations. We start by
6304 computing the width (position of the highest-order non-zero bit)
6305 and the number of low-order zero bits for each value. */
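      /* Worked example (illustrative): if nz0 == 0b1100 and nz1 == 0b0100,
	 then width0 == 4, width1 == 3 and low0 == low1 == 2; for PLUS this
	 yields result_width == 5 and result_low == 2, so only bits within
	 0b11100 can be nonzero in the sum.  */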
6306 {
6307 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6308 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6309 int width0 = floor_log2 (nz0) + 1;
6310 int width1 = floor_log2 (nz1) + 1;
6311 int low0 = floor_log2 (nz0 & -nz0);
6312 int low1 = floor_log2 (nz1 & -nz1);
6313 int op0_maybe_minusp = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6314 int op1_maybe_minusp = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6315 int result_width = mode_width;
6316 int result_low = 0;
6317
6318 switch (code)
6319 {
6320 case PLUS:
6321 result_width = MAX (width0, width1) + 1;
6322 result_low = MIN (low0, low1);
6323 break;
6324 case MINUS:
6325 result_low = MIN (low0, low1);
6326 break;
6327 case MULT:
6328 result_width = width0 + width1;
6329 result_low = low0 + low1;
6330 break;
6331 case DIV:
6332 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6333 result_width = width0;
6334 break;
6335 case UDIV:
6336 result_width = width0;
6337 break;
6338 case MOD:
6339 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6340 result_width = MIN (width0, width1);
6341 result_low = MIN (low0, low1);
6342 break;
6343 case UMOD:
6344 result_width = MIN (width0, width1);
6345 result_low = MIN (low0, low1);
6346 break;
6347 }
6348
6349 if (result_width < mode_width)
6350 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6351
6352 if (result_low > 0)
6353 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6354 }
6355 break;
6356
6357 case ZERO_EXTRACT:
6358 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6359 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6360 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6361 break;
6362
6363 case SUBREG:
6364 /* If this is a SUBREG formed for a promoted variable that has
6365 been zero-extended, we know that at least the high-order bits
6366 are zero, though others might be too. */
6367
6368 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6369 nonzero = (GET_MODE_MASK (GET_MODE (x))
6370 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
6371
6372 /* If the inner mode is a single word for both the host and target
6373 machines, we can compute this from which bits of the inner
6374 object might be nonzero. */
6375 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
6376 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6377 <= HOST_BITS_PER_WIDE_INT))
6378 {
6379 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
6380#ifndef BYTE_LOADS_EXTEND
6381 /* On many CISC machines, accessing an object in a wider mode
6382 causes the high-order bits to become undefined. So they are
6383 not known to be zero. */
6384 if (GET_MODE_SIZE (GET_MODE (x))
6385 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6386 nonzero |= (GET_MODE_MASK (GET_MODE (x))
6387 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
6388#endif
6389 }
6390 break;
6391
6392 case ASHIFTRT:
6393 case LSHIFTRT:
6394 case ASHIFT:
6395 case LSHIFT:
6396 case ROTATE:
6397 /* The nonzero bits are in two classes: any bits within MODE
6398 that aren't in GET_MODE (x) are always significant. The rest of the
6399 nonzero bits are those that are significant in the operand of
6400 the shift when shifted the appropriate number of bits. This
6401 shows that high-order bits are cleared by the right shift and
6402 low-order bits by left shifts. */
6403 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6404 && INTVAL (XEXP (x, 1)) >= 0
6405 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6406 {
6407 enum machine_mode inner_mode = GET_MODE (x);
6408 int width = GET_MODE_BITSIZE (inner_mode);
6409 int count = INTVAL (XEXP (x, 1));
6410 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6411 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
6412 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
6413 unsigned HOST_WIDE_INT outer = 0;
6414
6415 if (mode_width > width)
6416 outer = (op_nonzero & nonzero & ~ mode_mask);
6417
6418 if (code == LSHIFTRT)
6419 inner >>= count;
6420 else if (code == ASHIFTRT)
6421 {
6422 inner >>= count;
6423
6424 /* If the sign bit may have been nonzero before the shift, we
6425 need to mark all the places it could have been copied to
6426 by the shift as possibly nonzero. */
6427 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6428 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
6429 }
6430 else if (code == LSHIFT || code == ASHIFT)
6431 inner <<= count;
6432 else
6433 inner = ((inner << (count % width)
6434 | (inner >> (width - (count % width)))) & mode_mask);
6435
6436 nonzero &= (outer | inner);
6437 }
6438 break;
6439
6440 case FFS:
6441 /* This is at most the number of bits in the mode. */
6442 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
6443 break;
6444
6445 case IF_THEN_ELSE:
6446 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
6447 | nonzero_bits (XEXP (x, 2), mode));
6448 break;
6449 }
6450
6451 return nonzero;
6452}
6453\f
6454/* Return the number of bits at the high-order end of X that are known to
6455 be equal to the sign bit. This number will always be between 1 and
6456 the number of bits in the mode of X. MODE is the mode to be used
6457 if X is VOIDmode. */
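/* Illustrative examples (added commentary): in SImode,
   (sign_extend:SI (reg:QI)) yields at least 32 - 8 + 1 == 25 copies, and
   the constant -4 yields 30, since bits 31 through 2 all equal the sign
   bit.  */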
6458
6459static int
6460num_sign_bit_copies (x, mode)
6461 rtx x;
6462 enum machine_mode mode;
6463{
6464 enum rtx_code code = GET_CODE (x);
6465 int bitwidth;
6466 int num0, num1, result;
6467 unsigned HOST_WIDE_INT nonzero;
6468 rtx tem;
6469
6470 /* If we weren't given a mode, use the mode of X. If the mode is still
6471 VOIDmode, we don't know anything. */
6472
6473 if (mode == VOIDmode)
6474 mode = GET_MODE (x);
6475
6476 if (mode == VOIDmode)
6477 return 1;
6478
6479 bitwidth = GET_MODE_BITSIZE (mode);
6480
6481 switch (code)
6482 {
6483 case REG:
6484
6485 if (reg_last_set_value[REGNO (x)] != 0
6486 && reg_last_set_mode[REGNO (x)] == mode
6487 && (reg_n_sets[REGNO (x)] == 1
6488 || reg_last_set_label[REGNO (x)] == label_tick)
6489 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6490 return reg_last_set_sign_bit_copies[REGNO (x)];
6491
6492 tem = get_last_value (x);
6493 if (tem != 0)
6494 return num_sign_bit_copies (tem, mode);
6495
6496 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6497 return reg_sign_bit_copies[REGNO (x)];
6498 break;
6499
6500#ifdef BYTE_LOADS_SIGN_EXTEND
6501 case MEM:
6502 /* Some RISC machines sign-extend all loads of smaller than a word. */
6503 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
6504#endif
6505
6506 case CONST_INT:
6507 /* If the constant is negative, take its 1's complement and remask.
6508 Then see how many zero bits we have. */
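      /* Worked example (illustrative): in SImode, -4 masks to 0xfffffffc;
	 its complement is 3, floor_log2 (3) == 1, so the result is
	 32 - 1 - 1 == 30.  */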
6509 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
6510 if (bitwidth <= HOST_BITS_PER_WIDE_INT
6511 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6512 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
6513
6514 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
6515
6516 case SUBREG:
6517 /* If this is a SUBREG for a promoted object that is sign-extended
6518 and we are looking at it in a wider mode, we know that at least the
6519 high-order bits are known to be sign bit copies. */
6520
6521 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
6522 return (GET_MODE_BITSIZE (mode) - GET_MODE_BITSIZE (GET_MODE (x))
6523 + num_sign_bit_copies (SUBREG_REG (x), GET_MODE (x)));
6524
6525 /* For a smaller object, just ignore the high bits. */
6526 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
6527 {
6528 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
6529 return MAX (1, (num0
6530 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6531 - bitwidth)));
6532 }
6533
6534#ifdef BYTE_LOADS_EXTEND
6535 /* For paradoxical SUBREGs, just look inside since, on machines with
6536 one of these defined, we assume that operations are actually
6537 performed on the full register. Note that we are passing MODE
6538 to the recursive call, so the number of sign bit copies will
6539 remain relative to that mode, not the inner mode. */
6540
6541 if (GET_MODE_SIZE (GET_MODE (x))
6542 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6543 return num_sign_bit_copies (SUBREG_REG (x), mode);
6544#endif
6545
6546 break;
6547
6548 case SIGN_EXTRACT:
6549 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6550 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
6551 break;
6552
6553 case SIGN_EXTEND:
6554 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6555 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
6556
6557 case TRUNCATE:
6558 /* For a smaller object, just ignore the high bits. */
6559 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
6560 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6561 - bitwidth)));
6562
6563 case NOT:
6564 return num_sign_bit_copies (XEXP (x, 0), mode);
6565
6566 case ROTATE: case ROTATERT:
6567 /* If we are rotating left by a number of bits less than the number
6568 of sign bit copies, we can just subtract that amount from the
6569 number. */
6570 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6571 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
6572 {
6573 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6574 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
6575 : bitwidth - INTVAL (XEXP (x, 1))));
6576 }
6577 break;
6578
6579 case NEG:
6580 /* In general, this subtracts one sign bit copy. But if the value
6581 is known to be positive, the number of sign bit copies is the
6582 same as that of the input. Finally, if the input has just one bit
6583 that might be nonzero, all the bits are copies of the sign bit. */
6584 nonzero = nonzero_bits (XEXP (x, 0), mode);
6585 if (nonzero == 1)
6586 return bitwidth;
6587
6588 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6589 if (num0 > 1
6590 && bitwidth <= HOST_BITS_PER_WIDE_INT
6591 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
6592 num0--;
6593
6594 return num0;
6595
6596 case IOR: case AND: case XOR:
6597 case SMIN: case SMAX: case UMIN: case UMAX:
6598 /* Logical operations will preserve the number of sign-bit copies.
6599 MIN and MAX operations always return one of the operands. */
6600 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6601 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6602 return MIN (num0, num1);
6603
6604 case PLUS: case MINUS:
6605 /* For addition and subtraction, we can have a 1-bit carry. However,
6606 if we are subtracting 1 from a positive number, there will not
6607 be such a carry. Furthermore, if the positive number is known to
6608 be 0 or 1, we know the result is either -1 or 0. */
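      /* Illustrative case: if X is known to be 0 or 1, (plus X -1) is
	 known to be 0 or -1, so every bit copies the sign bit and the
	 full bitwidth is returned below.  */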
6609
6610 if (code == PLUS && XEXP (x, 1) == constm1_rtx
6611 && bitwidth <= HOST_BITS_PER_WIDE_INT)
6612 {
6613 nonzero = nonzero_bits (XEXP (x, 0), mode);
6614 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
6615 return (nonzero == 1 || nonzero == 0 ? bitwidth
6616 : bitwidth - floor_log2 (nonzero) - 1);
6617 }
6618
6619 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6620 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6621 return MAX (1, MIN (num0, num1) - 1);
6622
6623 case MULT:
6624 /* The number of bits of the product is the sum of the number of
6625 bits of both terms. However, unless one of the terms is known
6626 to be positive, we must allow for an additional bit since negating
6627 a negative number can remove one sign bit copy. */
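      /* Worked example (illustrative): with 32-bit operands each having 20
	 sign bit copies, each value fits in 13 bits, so the result starts
	 at 20 + 20 - 32 == 8 copies; one more is subtracted below unless
	 an operand is known to be nonnegative.  */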
6628
6629 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6630 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6631
6632 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
6633 if (result > 0
6634 && bitwidth <= HOST_BITS_PER_WIDE_INT
6635 && ((nonzero_bits (XEXP (x, 0), mode)
6636 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6637 && ((nonzero_bits (XEXP (x, 1), mode)
6638 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
6639 result--;
6640
6641 return MAX (1, result);
6642
6643 case UDIV:
6644 /* The result must be <= the first operand. */
6645 return num_sign_bit_copies (XEXP (x, 0), mode);
6646
6647 case UMOD:
6648 /* The result must be <= the second operand. */
6649 return num_sign_bit_copies (XEXP (x, 1), mode);
6650
6651 case DIV:
6652 /* Similar to unsigned division, except that we have to worry about
6653 the case where the divisor is negative, in which case we have
6654 to add 1. */
6655 result = num_sign_bit_copies (XEXP (x, 0), mode);
6656 if (result > 1
6657 && bitwidth <= HOST_BITS_PER_WIDE_INT
6658 && (nonzero_bits (XEXP (x, 1), mode)
6659 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6660 result--;
6661
6662 return result;
6663
6664 case MOD:
6665 result = num_sign_bit_copies (XEXP (x, 1), mode);
6666 if (result > 1
6667 && bitwidth <= HOST_BITS_PER_WIDE_INT
6668 && (nonzero_bits (XEXP (x, 1), mode)
6669 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6670 result--;
6671
6672 return result;
6673
6674 case ASHIFTRT:
6675 /* Shifts by a constant add to the number of bits equal to the
6676 sign bit. */
6677 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6678 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6679 && INTVAL (XEXP (x, 1)) > 0)
6680 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
6681
6682 return num0;
6683
6684 case ASHIFT:
6685 case LSHIFT:
6686 /* Left shifts destroy copies. */
6687 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6688 || INTVAL (XEXP (x, 1)) < 0
6689 || INTVAL (XEXP (x, 1)) >= bitwidth)
6690 return 1;
6691
6692 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6693 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
6694
6695 case IF_THEN_ELSE:
6696 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
6697 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
6698 return MIN (num0, num1);
6699
6700#if STORE_FLAG_VALUE == -1
6701 case EQ: case NE: case GE: case GT: case LE: case LT:
6702 case GEU: case GTU: case LEU: case LTU:
6703 return bitwidth;
6704#endif
6705 }
6706
6707 /* If we haven't been able to figure it out by one of the above rules,
6708 see if some of the high-order bits are known to be zero. If so,
6709 count those bits and return one less than that amount. If we can't
6710 safely compute the mask for this mode, always return BITWIDTH. */
6711
6712 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6713 return 1;
6714
6715 nonzero = nonzero_bits (x, mode);
6716 return (nonzero == GET_MODE_MASK (mode)
6717 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
6718}
6719\f
6720/* Return the number of "extended" bits there are in X, when interpreted
6721 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
6722 unsigned quantities, this is the number of high-order zero bits.
6723 For signed quantities, this is the number of copies of the sign bit
6724 minus 1. In both cases, this function returns the number of "spare"
6725 bits. For example, if two quantities for which this function returns
6726 at least 1 are added, the addition is known not to overflow.
6727
6728 This function will always return 0 unless called during combine, which
6729 implies that it must be called from a define_split. */
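/* Illustrative use (added commentary): for (zero_extend:SI (reg:QI)),
   extended_count with UNSIGNEDP == 1 gives 31 - floor_log2 (255) == 24,
   so adding two such quantities cannot overflow SImode.  */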
6730
6731int
6732extended_count (x, mode, unsignedp)
6733 rtx x;
6734 enum machine_mode mode;
6735 int unsignedp;
6736{
6737 if (nonzero_sign_valid == 0)
6738 return 0;
6739
6740 return (unsignedp
6741 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6742 && (GET_MODE_BITSIZE (mode) - 1
6743 - floor_log2 (nonzero_bits (x, mode))))
6744 : num_sign_bit_copies (x, mode) - 1);
6745}
6746\f
6747/* This function is called from `simplify_shift_const' to merge two
6748 outer operations. Specifically, we have already found that we need
6749 to perform operation *POP0 with constant *PCONST0 at the outermost
6750 position. We would now like to also perform OP1 with constant CONST1
6751 (with *POP0 being done last).
6752
6753 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
6754 the resulting operation. *PCOMP_P is set to 1 if we would need to
6755 complement the innermost operand, otherwise it is unchanged.
6756
6757 MODE is the mode in which the operation will be done. No bits outside
6758 the width of this mode matter. It is assumed that the width of this mode
6759 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
6760
6761 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
6762 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
6763 result is simply *PCONST0.
6764
6765 If the resulting operation cannot be expressed as one operation, we
6766 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
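/* Illustrative merges (added commentary): two ANDs fold into one AND of
   the intersection of their masks, and with equal constants an inner AND
   followed by an outer IOR of the same constant reduces to SET, since
   (a & b) | b == b.  */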
6767
6768static int
6769merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
6770 enum rtx_code *pop0;
6771 HOST_WIDE_INT *pconst0;
6772 enum rtx_code op1;
6773 HOST_WIDE_INT const1;
6774 enum machine_mode mode;
6775 int *pcomp_p;
6776{
6777 enum rtx_code op0 = *pop0;
6778 HOST_WIDE_INT const0 = *pconst0;
6779
6780 const0 &= GET_MODE_MASK (mode);
6781 const1 &= GET_MODE_MASK (mode);
6782
6783 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6784 if (op0 == AND)
6785 const1 &= const0;
6786
6787 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6788 if OP0 is SET. */
6789
6790 if (op1 == NIL || op0 == SET)
6791 return 1;
6792
6793 else if (op0 == NIL)
6794 op0 = op1, const0 = const1;
6795
6796 else if (op0 == op1)
6797 {
6798 switch (op0)
6799 {
6800 case AND:
6801 const0 &= const1;
6802 break;
6803 case IOR:
6804 const0 |= const1;
6805 break;
6806 case XOR:
6807 const0 ^= const1;
6808 break;
6809 case PLUS:
6810 const0 += const1;
6811 break;
6812 case NEG:
6813 op0 = NIL;
6814 break;
6815 }
6816 }
6817
6818 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6819 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6820 return 0;
6821
6822 /* If the two constants aren't the same, we can't do anything. The
6823 remaining six cases can all be done. */
6824 else if (const0 != const1)
6825 return 0;
6826
6827 else
6828 switch (op0)
6829 {
6830 case IOR:
6831 if (op1 == AND)
6832 /* (a & b) | b == b */
6833 op0 = SET;
6834 else /* op1 == XOR */
6835 /* (a ^ b) | b == a | b */
6836 ;
6837 break;
6838
6839 case XOR:
6840 if (op1 == AND)
6841 /* (a & b) ^ b == (~a) & b */
6842 op0 = AND, *pcomp_p = 1;
6843 else /* op1 == IOR */
6844 /* (a | b) ^ b == a & ~b */
6845 op0 = AND, *pconst0 = ~ const0;
6846 break;
6847
6848 case AND:
6849 if (op1 == IOR)
6850 /* (a | b) & b == b */
6851 op0 = SET;
6852 else /* op1 == XOR */
6853 /* (a ^ b) & b == (~a) & b */
6854 *pcomp_p = 1;
6855 break;
6856 }
6857
6858 /* Check for NO-OP cases. */
6859 const0 &= GET_MODE_MASK (mode);
6860 if (const0 == 0
6861 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6862 op0 = NIL;
6863 else if (const0 == 0 && op0 == AND)
6864 op0 = SET;
6865 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6866 op0 = NIL;
6867
6868 *pop0 = op0;
6869 *pconst0 = const0;
6870
6871 return 1;
6872}
6873\f
6874/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6875 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6876 that we started with.
6877
6878 The shift is normally computed in the widest mode we find in VAROP, as
6879 long as it isn't a different number of words than RESULT_MODE. Exceptions
6880 are ASHIFTRT and ROTATE, which are always done in their original mode. */
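/* Worked case (illustrative): (lshiftrt:SI (lshiftrt:SI X 2) 3) is folded
   below into a single (lshiftrt:SI X 5); same-direction shift counts
   simply add.  */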
6881
6882static rtx
6883simplify_shift_const (x, code, result_mode, varop, count)
6884 rtx x;
6885 enum rtx_code code;
6886 enum machine_mode result_mode;
6887 rtx varop;
6888 int count;
6889{
6890 enum rtx_code orig_code = code;
6891 int orig_count = count;
6892 enum machine_mode mode = result_mode;
6893 enum machine_mode shift_mode, tmode;
6894 int mode_words
6895 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6896 /* We form (outer_op (code varop count) (outer_const)). */
6897 enum rtx_code outer_op = NIL;
6898 HOST_WIDE_INT outer_const;
6899 rtx const_rtx;
6900 int complement_p = 0;
6901 rtx new;
6902
6903 /* If we were given an invalid count, don't do anything except exactly
6904 what was requested. */
6905
6906 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6907 {
6908 if (x)
6909 return x;
6910
6911 return gen_rtx (code, mode, varop, GEN_INT (count));
6912 }
6913
6914 /* Unless one of the branches of the `if' in this loop does a `continue',
6915 we will `break' the loop after the `if'. */
6916
6917 while (count != 0)
6918 {
6919 /* If we have an operand of (clobber (const_int 0)), just return that
6920 value. */
6921 if (GET_CODE (varop) == CLOBBER)
6922 return varop;
6923
6924 /* If we discovered we had to complement VAROP, leave. Making a NOT
6925 here would cause an infinite loop. */
6926 if (complement_p)
6927 break;
6928
6929 /* Convert ROTATERT to ROTATE. */
6930 if (code == ROTATERT)
6931 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
6932
6933 /* Canonicalize LSHIFT to ASHIFT. */
6934 if (code == LSHIFT)
6935 code = ASHIFT;
6936
6937 /* We need to determine what mode we will do the shift in. If the
6938 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
6939 was originally done in. Otherwise, we can do it in MODE, the widest
6940 mode encountered. */
6941 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6942
6943 /* Handle cases where the count is greater than the size of the mode
6944 minus 1. For ASHIFTRT, use the size minus one as the count (this can
6945 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6946 take the count modulo the size. For other shifts, the result is
6947 zero.
6948
6949 Since these shifts are being produced by the compiler by combining
6950 multiple operations, each of which are defined, we know what the
6951 result is supposed to be. */
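      /* For example (illustrative): in SImode an ASHIFTRT count of 34 is
	 treated as 31, a ROTATE count of 34 as 2, and a logical or left
	 shift by 34 makes the result zero, subject to any outer op.  */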
6952
6953 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
6954 {
6955 if (code == ASHIFTRT)
6956 count = GET_MODE_BITSIZE (shift_mode) - 1;
6957 else if (code == ROTATE || code == ROTATERT)
6958 count %= GET_MODE_BITSIZE (shift_mode);
6959 else
6960 {
6961 /* We can't simply return zero because there may be an
6962 outer op. */
6963 varop = const0_rtx;
6964 count = 0;
6965 break;
6966 }
6967 }
6968
6969 /* Negative counts are invalid and should not have been made (a
6970 programmer-specified negative count should have been handled
6971 above). */
6972 else if (count < 0)
6973 abort ();
6974
6975 /* An arithmetic right shift of a quantity known to be -1 or 0
6976 is a no-op. */
6977 if (code == ASHIFTRT
6978 && (num_sign_bit_copies (varop, shift_mode)
6979 == GET_MODE_BITSIZE (shift_mode)))
6980 {
6981 count = 0;
6982 break;
6983 }
6984
6985 /* If we are doing an arithmetic right shift and discarding all but
6986 the sign bit copies, this is equivalent to doing a shift by the
6987 bitsize minus one. Convert it into that shift because it will often
6988 allow other simplifications. */
6989
6990 if (code == ASHIFTRT
6991 && (count + num_sign_bit_copies (varop, shift_mode)
6992 >= GET_MODE_BITSIZE (shift_mode)))
6993 count = GET_MODE_BITSIZE (shift_mode) - 1;
6994
6995 /* We simplify the tests below and elsewhere by converting
6996 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
6997 `make_compound_operation' will convert it to an ASHIFTRT for
6998 those machines (such as Vax) that don't have an LSHIFTRT. */
6999 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
7000 && code == ASHIFTRT
7001 && ((nonzero_bits (varop, shift_mode)
7002 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7003 == 0))
7004 code = LSHIFTRT;
7005
7006 switch (GET_CODE (varop))
7007 {
7008 case SIGN_EXTEND:
7009 case ZERO_EXTEND:
7010 case SIGN_EXTRACT:
7011 case ZERO_EXTRACT:
7012 new = expand_compound_operation (varop);
7013 if (new != varop)
7014 {
7015 varop = new;
7016 continue;
7017 }
7018 break;
7019
7020 case MEM:
7021 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7022 minus the width of a smaller mode, we can do this with a
7023 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7024 if ((code == ASHIFTRT || code == LSHIFTRT)
7025 && ! mode_dependent_address_p (XEXP (varop, 0))
7026 && ! MEM_VOLATILE_P (varop)
7027 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7028 MODE_INT, 1)) != BLKmode)
7029 {
7030#if BYTES_BIG_ENDIAN
7031 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7032#else
7033 new = gen_rtx (MEM, tmode,
7034 plus_constant (XEXP (varop, 0),
7035 count / BITS_PER_UNIT));
7036 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7037 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7038 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7039#endif
7040 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7041 : ZERO_EXTEND, mode, new);
7042 count = 0;
7043 continue;
7044 }
7045 break;
7046
7047 case USE:
7048 /* Similar to the case above, except that we can only do this if
7049 the resulting mode is the same as that of the underlying
7050 MEM and adjust the address depending on the *bits* endianness
7051 because of the way that bit-field extract insns are defined. */
7052 if ((code == ASHIFTRT || code == LSHIFTRT)
7053 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7054 MODE_INT, 1)) != BLKmode
7055 && tmode == GET_MODE (XEXP (varop, 0)))
7056 {
7057#if BITS_BIG_ENDIAN
7058 new = XEXP (varop, 0);
7059#else
7060 new = copy_rtx (XEXP (varop, 0));
7061 SUBST (XEXP (new, 0),
7062 plus_constant (XEXP (new, 0),
7063 count / BITS_PER_UNIT));
7064#endif
7065
7066 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7067 : ZERO_EXTEND, mode, new);
7068 count = 0;
7069 continue;
7070 }
7071 break;
7072
7073 case SUBREG:
7074 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7075 the same number of words as what we've seen so far. Then store
7076 the widest mode in MODE. */
7077 if (subreg_lowpart_p (varop)
7078 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7079 > GET_MODE_SIZE (GET_MODE (varop)))
7080 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7081 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7082 == mode_words))
7083 {
7084 varop = SUBREG_REG (varop);
7085 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7086 mode = GET_MODE (varop);
7087 continue;
7088 }
7089 break;
7090
7091 case MULT:
7092 /* Some machines use MULT instead of ASHIFT because MULT
7093 is cheaper. But it is still better on those machines to
7094 merge two shifts into one. */
7095 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7096 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7097 {
7098 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
7099 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7100 continue;
7101 }
7102 break;
7103
7104 case UDIV:
7105 /* Similar, for when divides are cheaper. */
7106 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7107 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7108 {
7109 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
7110 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7111 continue;
7112 }
7113 break;
7114
7115 case ASHIFTRT:
7116 /* If we are extracting just the sign bit of an arithmetic right
7117 shift, that shift is not needed. */
7118 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7119 {
7120 varop = XEXP (varop, 0);
7121 continue;
7122 }
7123
7124 /* ... fall through ... */
7125
7126 case LSHIFTRT:
7127 case ASHIFT:
7128 case LSHIFT:
7129 case ROTATE:
7130 /* Here we have two nested shifts. The result is usually the
7131 AND of a new shift with a mask. We compute the result below. */
7132 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7133 && INTVAL (XEXP (varop, 1)) >= 0
7134 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
7135 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7136 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7137 {
7138 enum rtx_code first_code = GET_CODE (varop);
7139 int first_count = INTVAL (XEXP (varop, 1));
7140 unsigned HOST_WIDE_INT mask;
7141 rtx mask_rtx;
7142 rtx inner;
7143
7144 if (first_code == LSHIFT)
7145 first_code = ASHIFT;
7146
7147 /* We have one common special case. We can't do any merging if
7148 the inner code is an ASHIFTRT of a smaller mode. However, if
7149 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7150 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7151 we can convert it to
7152 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
7153 This simplifies certain SIGN_EXTEND operations. */
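	     /* Illustrative instance (assumed modes): with M2 == QImode and
		M1 == SImode, C2 == 24, the transformation yields an AND
		whose mask has the low C1 bits clear, an ASHIFT by 24, and
		an outer ASHIFTRT by C1.  */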
7154 if (code == ASHIFT && first_code == ASHIFTRT
7155 && (GET_MODE_BITSIZE (result_mode)
7156 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7157 {
7158 /* C3 has the low-order C1 bits zero. */
7159
7160 mask = (GET_MODE_MASK (mode)
7161 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
7162
7163 varop = simplify_and_const_int (NULL_RTX, result_mode,
7164 XEXP (varop, 0), mask);
7165 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
7166 varop, count);
7167 count = first_count;
7168 code = ASHIFTRT;
7169 continue;
7170 }
7171
7172 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7173 than C1 high-order bits equal to the sign bit, we can convert
7174 this to either an ASHIFT or an ASHIFTRT depending on the
7175 two counts.
7176
7177 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7178
7179 if (code == ASHIFTRT && first_code == ASHIFT
7180 && GET_MODE (varop) == shift_mode
7181 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7182 > first_count))
230d793d 7183 {
7184 count -= first_count;
7185 if (count < 0)
7186 count = - count, code = ASHIFT;
7187 varop = XEXP (varop, 0);
7188 continue;
7189 }
7190
7191 /* There are some cases we can't do. If CODE is ASHIFTRT,
7192 we can only do this if FIRST_CODE is also ASHIFTRT.
7193
7194 We can't do the case when CODE is ROTATE and FIRST_CODE is
7195 ASHIFTRT.
7196
7197 If the mode of this shift is not the mode of the outer shift,
7198 we can't do this if either shift is ASHIFTRT or ROTATE.
7199
7200 Finally, we can't do any of these if the mode is too wide
7201 unless the codes are the same.
7202
7203 Handle the case where the shift codes are the same
7204 first. */
7205
7206 if (code == first_code)
7207 {
7208 if (GET_MODE (varop) != result_mode
7209 && (code == ASHIFTRT || code == ROTATE))
7210 break;
7211
7212 count += first_count;
7213 varop = XEXP (varop, 0);
7214 continue;
7215 }
7216
7217 if (code == ASHIFTRT
7218 || (code == ROTATE && first_code == ASHIFTRT)
7219 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
7220 || (GET_MODE (varop) != result_mode
7221 && (first_code == ASHIFTRT || first_code == ROTATE
7222 || code == ROTATE)))
7223 break;
7224
7225 /* To compute the mask to apply after the shift, shift the
7226 nonzero bits of the inner shift the same way the
7227 outer shift will. */
7228
7229 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
7230
7231 mask_rtx
7232 = simplify_binary_operation (code, result_mode, mask_rtx,
7233 GEN_INT (count));
7234
7235 /* Give up if we can't compute an outer operation to use. */
7236 if (mask_rtx == 0
7237 || GET_CODE (mask_rtx) != CONST_INT
7238 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7239 INTVAL (mask_rtx),
7240 result_mode, &complement_p))
7241 break;
7242
7243 /* If the shifts are in the same direction, we add the
7244 counts. Otherwise, we subtract them. */
7245 if ((code == ASHIFTRT || code == LSHIFTRT)
7246 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7247 count += first_count;
7248 else
7249 count -= first_count;
7250
7251 /* If COUNT is positive, the new shift is usually CODE,
7252 except for the two exceptions below, in which case it is
7253 FIRST_CODE. If the count is negative, FIRST_CODE should
7254 always be used. */
7255 if (count > 0
7256 && ((first_code == ROTATE && code == ASHIFT)
7257 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7258 code = first_code;
7259 else if (count < 0)
7260 code = first_code, count = - count;
7261
7262 varop = XEXP (varop, 0);
7263 continue;
7264 }
7265
7266 /* If we have (A << B << C) for any shift, we can convert this to
7267 (A << C << B). This wins if A is a constant. Only try this if
7268 B is not a constant. */
7269
7270 else if (GET_CODE (varop) == code
7271 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7272 && 0 != (new
7273 = simplify_binary_operation (code, mode,
7274 XEXP (varop, 0),
7275 GEN_INT (count))))
7276 {
7277 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7278 count = 0;
7279 continue;
7280 }
7281 break;
7282
7283 case NOT:
7284 /* Make this fit the case below. */
7285 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
7286 GEN_INT (GET_MODE_MASK (mode)));
7287 continue;
7288
7289 case IOR:
7290 case AND:
7291 case XOR:
7292 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7293 with C the size of VAROP - 1 and the shift is logical if
7294 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7295 we have an (le X 0) operation. If we have an arithmetic shift
7296 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7297 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
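	  /* Derivation sketch (added commentary): ((X - 1) | X) has its
	     sign bit set exactly when X <= 0, so shifting that sign bit
	     down by the mode width minus 1 yields the (le X 0) value.  */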
7298
7299 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7300 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7301 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7302 && (code == LSHIFTRT || code == ASHIFTRT)
7303 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7304 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7305 {
7306 count = 0;
7307 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7308 const0_rtx);
7309
7310 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7311 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7312
7313 continue;
7314 }
7315
7316 /* If we have (shift (logical)), move the logical to the outside
7317 to allow it to possibly combine with another logical and the
7318 shift to combine with another shift. This also canonicalizes to
7319 what a ZERO_EXTRACT looks like. Also, some machines have
7320 (and (shift)) insns. */
7321
7322 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7323 && (new = simplify_binary_operation (code, result_mode,
7324 XEXP (varop, 1),
7325 GEN_INT (count))) != 0
7326 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7327 INTVAL (new), result_mode, &complement_p))
7328 {
7329 varop = XEXP (varop, 0);
7330 continue;
7331 }
7332
7333 /* If we can't do that, try to simplify the shift in each arm of the
7334 logical expression, make a new logical expression, and apply
7335 the inverse distributive law. */
7336 {
7337 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
7338 XEXP (varop, 0), count);
7339 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
7340 XEXP (varop, 1), count);
7341
7342 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
7343 varop = apply_distributive_law (varop);
7344
7345 count = 0;
7346 }
7347 break;
7348
7349 case EQ:
7350 /* Convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7351 says that the sign bit can be tested, FOO has mode MODE, C is
7352 GET_MODE_BITSIZE (MODE) - 1, and only the low-order bit of FOO
7353 may be nonzero. */
7354 if (code == LSHIFT
7355 && XEXP (varop, 1) == const0_rtx
7356 && GET_MODE (XEXP (varop, 0)) == result_mode
7357 && count == GET_MODE_BITSIZE (result_mode) - 1
7358 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7359 && ((STORE_FLAG_VALUE
7360 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
7361 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7362 && merge_outer_ops (&outer_op, &outer_const, XOR,
7363 (HOST_WIDE_INT) 1, result_mode,
7364 &complement_p))
7365 {
7366 varop = XEXP (varop, 0);
7367 count = 0;
7368 continue;
7369 }
7370 break;
7371
7372 case NEG:
7373 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7374 than the number of bits in the mode is equivalent to A. */
7375 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7376 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
7377 {
7378 varop = XEXP (varop, 0);
7379 count = 0;
7380 continue;
7381 }
7382
7383 /* NEG commutes with ASHIFT since it is multiplication. Move the
7384 NEG outside to allow shifts to combine. */
7385 if (code == ASHIFT
7386 && merge_outer_ops (&outer_op, &outer_const, NEG,
7387 (HOST_WIDE_INT) 0, result_mode,
7388 &complement_p))
7389 {
7390 varop = XEXP (varop, 0);
7391 continue;
7392 }
7393 break;
7394
7395 case PLUS:
7396 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7397 is one less than the number of bits in the mode is
7398 equivalent to (xor A 1). */
7399 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7400 && XEXP (varop, 1) == constm1_rtx
7401 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7402 && merge_outer_ops (&outer_op, &outer_const, XOR,
7403 (HOST_WIDE_INT) 1, result_mode,
7404 &complement_p))
7405 {
7406 count = 0;
7407 varop = XEXP (varop, 0);
7408 continue;
7409 }
7410
7411 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
7412 that might be nonzero in BAR are those being shifted out and those
7413 bits are known zero in FOO, we can replace the PLUS with FOO.
7414 Similarly in the other operand order. This code occurs when
7415 we are computing the size of a variable-size array. */
7416
7417 if ((code == ASHIFTRT || code == LSHIFTRT)
7418 && count < HOST_BITS_PER_WIDE_INT
7419 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
7420 && (nonzero_bits (XEXP (varop, 1), result_mode)
7421 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
7422 {
7423 varop = XEXP (varop, 0);
7424 continue;
7425 }
7426 else if ((code == ASHIFTRT || code == LSHIFTRT)
7427 && count < HOST_BITS_PER_WIDE_INT
7428 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7429 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7430 >> count)
7431 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7432 & nonzero_bits (XEXP (varop, 1),
7433 result_mode)))
7434 {
7435 varop = XEXP (varop, 1);
7436 continue;
7437 }
7438
7439 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7440 if (code == ASHIFT
7441 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7442 && (new = simplify_binary_operation (ASHIFT, result_mode,
7443 XEXP (varop, 1),
7444 GEN_INT (count))) != 0
7445 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7446 INTVAL (new), result_mode, &complement_p))
7447 {
7448 varop = XEXP (varop, 0);
7449 continue;
7450 }
7451 break;
7452
7453 case MINUS:
7454 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
7455 with C the size of VAROP - 1 and the shift is logical if
7456 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7457 we have a (gt X 0) operation. If the shift is arithmetic with
7458 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7459 we have a (neg (gt X 0)) operation. */
7460
7461 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7462 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7463 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7464 && (code == LSHIFTRT || code == ASHIFTRT)
7465 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7466 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7467 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7468 {
7469 count = 0;
7470 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7471 const0_rtx);
7472
7473 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7474 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7475
7476 continue;
7477 }
7478 break;
7479 }
7480
7481 break;
7482 }
7483
7484 /* We need to determine what mode to do the shift in. If the shift is
7485 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7486 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7487 The code we care about is that of the shift that will actually be done,
7488 not the shift that was originally requested. */
7489 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7490
7491 /* We have now finished analyzing the shift. The result should be
7492 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7493 OUTER_OP is non-NIL, it is an operation that needs to be applied
7494 to the result of the shift. OUTER_CONST is the relevant constant,
7495 but we must turn off all bits turned off in the shift.
7496
7497 If we were passed a value for X, see if we can use any pieces of
7498 it. If not, make new rtx. */
7499
7500 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7501 && GET_CODE (XEXP (x, 1)) == CONST_INT
7502 && INTVAL (XEXP (x, 1)) == count)
7503 const_rtx = XEXP (x, 1);
7504 else
7505 const_rtx = GEN_INT (count);
7506
7507 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7508 && GET_MODE (XEXP (x, 0)) == shift_mode
7509 && SUBREG_REG (XEXP (x, 0)) == varop)
7510 varop = XEXP (x, 0);
7511 else if (GET_MODE (varop) != shift_mode)
7512 varop = gen_lowpart_for_combine (shift_mode, varop);
7513
7514 /* If we can't make the SUBREG, try to return what we were given. */
7515 if (GET_CODE (varop) == CLOBBER)
7516 return x ? x : varop;
7517
7518 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
7519 if (new != 0)
7520 x = new;
7521 else
7522 {
7523 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
7524 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
7525
7526 SUBST (XEXP (x, 0), varop);
7527 SUBST (XEXP (x, 1), const_rtx);
7528 }
7529
7530 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
7531 turn off all the bits that the shift would have turned off. */
7532 if (orig_code == LSHIFTRT && result_mode != shift_mode)
7533 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
7534 GET_MODE_MASK (result_mode) >> orig_count);
7535
7536 /* Do the remainder of the processing in RESULT_MODE. */
7537 x = gen_lowpart_for_combine (result_mode, x);
7538
7539 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
7540 operation. */
7541 if (complement_p)
7542 x = gen_unary (NOT, result_mode, x);
7543
7544 if (outer_op != NIL)
7545 {
7546 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT
7547 outer_const &= GET_MODE_MASK (result_mode);
7548
7549 if (outer_op == AND)
7550 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
7551 else if (outer_op == SET)
7552 /* This means that we have determined that the result is
7553 equivalent to a constant. This should be rare. */
7554 x = GEN_INT (outer_const);
7555 else if (GET_RTX_CLASS (outer_op) == '1')
7556 x = gen_unary (outer_op, result_mode, x);
7557 else
7558 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
7559 }
7560
7561 return x;
7562}
7563\f
7564/* Like recog, but we receive the address of a pointer to a new pattern.
7565 We try to match the rtx that the pointer points to.
7566 If that fails, we may try to modify or replace the pattern,
7567 storing the replacement into the same pointer object.
7568
7569 Modifications include deletion or addition of CLOBBERs.
7570
7571 PNOTES is a pointer to a location where any REG_UNUSED notes added for
7572 the CLOBBERs are placed.
7573
7574 The value is the final insn code from the pattern ultimately matched,
7575 or -1. */
7576
7577static int
7578recog_for_combine (pnewpat, insn, pnotes)
7579 rtx *pnewpat;
7580 rtx insn;
7581 rtx *pnotes;
7582{
7583 register rtx pat = *pnewpat;
7584 int insn_code_number;
7585 int num_clobbers_to_add = 0;
7586 int i;
7587 rtx notes = 0;
7588
7589 /* Is the result of combination a valid instruction? */
7590 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7591
7592 /* If it isn't, there is the possibility that we previously had an insn
7593 that clobbered some register as a side effect, but the combined
7594 insn doesn't need to do that. So try once more without the clobbers
7595 unless this represents an ASM insn. */
7596
7597 if (insn_code_number < 0 && ! check_asm_operands (pat)
7598 && GET_CODE (pat) == PARALLEL)
7599 {
7600 int pos;
7601
7602 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
7603 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
7604 {
7605 if (i != pos)
7606 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
7607 pos++;
7608 }
7609
7610 SUBST_INT (XVECLEN (pat, 0), pos);
7611
7612 if (pos == 1)
7613 pat = XVECEXP (pat, 0, 0);
7614
7615 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7616 }
7617
7618 /* If we had any clobbers to add, make a new pattern that contains
7619 them. Then check to make sure that all of them are dead. */
7620 if (num_clobbers_to_add)
7621 {
7622 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
7623 gen_rtvec (GET_CODE (pat) == PARALLEL
7624 ? XVECLEN (pat, 0) + num_clobbers_to_add
7625 : num_clobbers_to_add + 1));
7626
7627 if (GET_CODE (pat) == PARALLEL)
7628 for (i = 0; i < XVECLEN (pat, 0); i++)
7629 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
7630 else
7631 XVECEXP (newpat, 0, 0) = pat;
7632
7633 add_clobbers (newpat, insn_code_number);
7634
7635 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
7636 i < XVECLEN (newpat, 0); i++)
7637 {
7638 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
7639 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
7640 return -1;
7641 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
7642 XEXP (XVECEXP (newpat, 0, i), 0), notes);
7643 }
7644 pat = newpat;
7645 }
7646
7647 *pnewpat = pat;
7648 *pnotes = notes;
7649
7650 return insn_code_number;
7651}
7652\f
7653/* Like gen_lowpart but for use by combine. In combine it is not possible
7654 to create any new pseudoregs. However, it is safe to create
7655 invalid memory addresses, because combine will try to recognize
7656 them and all they will do is make the combine attempt fail.
7657
7658 If for some reason this cannot do its job, an rtx
7659 (clobber (const_int 0)) is returned.
7660 An insn containing that will not be recognized. */
7661
7662#undef gen_lowpart
7663
7664static rtx
7665gen_lowpart_for_combine (mode, x)
7666 enum machine_mode mode;
7667 register rtx x;
7668{
7669 rtx result;
7670
7671 if (GET_MODE (x) == mode)
7672 return x;
7673
7674 /* We can only support MODE being wider than a word if X is a
7675 constant integer or has a mode the same size. */
7676
7677 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
7678 && ! ((GET_MODE (x) == VOIDmode
7679 && (GET_CODE (x) == CONST_INT
7680 || GET_CODE (x) == CONST_DOUBLE))
7681 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
230d793d
RS
7682 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);

  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
     won't know what to do.  So we will strip off the SUBREG here and
     process normally.  */
  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    {
      x = SUBREG_REG (x);
      if (GET_MODE (x) == mode)
        return x;
    }

  result = gen_lowpart_common (mode, x);
  if (result)
    return result;

  if (GET_CODE (x) == MEM)
    {
      register int offset = 0;
      rtx new;

      /* Refuse to work on a volatile memory ref or one with a mode-dependent
         address.  */
      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
        return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);

      /* If we want to refer to something bigger than the original memref,
         generate a perverse subreg instead.  That will force a reload
         of the original memref X.  */
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
        return gen_rtx (SUBREG, mode, x, 0);

#if WORDS_BIG_ENDIAN
      offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
                - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
#endif
#if BYTES_BIG_ENDIAN
      /* Adjust the address so that the address-after-the-data
         is unchanged.  */
      offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
                 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
#endif
      new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
      return new;
    }

  /* If X is a comparison operator, rewrite it in a new mode.  This
     probably won't match, but may allow further simplifications.  */
  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
    return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));

  /* If we couldn't simplify X any other way, just enclose it in a
     SUBREG.  Normally, this SUBREG won't match, but some patterns may
     include an explicit SUBREG or we may simplify it further in combine.  */
  else
    {
      int word = 0;

      if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
        word = ((GET_MODE_SIZE (GET_MODE (x))
                 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
                / UNITS_PER_WORD);
      return gen_rtx (SUBREG, mode, x, word);
    }
}
\f
/* Make an rtx expression.  This is a subset of gen_rtx and only supports
   expressions of 1, 2, or 3 operands, each of which are rtx expressions.

   If the identical expression was previously in the insn (in the undobuf),
   it will be returned.  Only if it is not found will a new expression
   be made.  */
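/* Illustrative usage (added commentary, not from the original sources):
   gen_rtx_combine (IOR, SImode, a, b) builds (ior:SI a b), but first reuses
   a structurally identical rtx recorded in undobuf from this same insn if
   one exists, so repeated substitution attempts do not allocate duplicate
   expressions or create circular rtl.  */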

/*VARARGS2*/
static rtx
gen_rtx_combine (va_alist)
     va_dcl
{
  va_list p;
  enum rtx_code code;
  enum machine_mode mode;
  int n_args;
  rtx args[3];
  int i, j;
  char *fmt;
  rtx rt;

  va_start (p);
  code = va_arg (p, enum rtx_code);
  mode = va_arg (p, enum machine_mode);
  n_args = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  if (n_args == 0 || n_args > 3)
    abort ();

  /* Get each arg and verify that it is supposed to be an expression.  */
  for (j = 0; j < n_args; j++)
    {
      if (*fmt++ != 'e')
        abort ();

      args[j] = va_arg (p, rtx);
    }

  /* See if this is in undobuf.  Be sure we don't use objects that came
     from another insn; this could produce circular rtl structures.  */

  for (i = previous_num_undos; i < undobuf.num_undo; i++)
    if (!undobuf.undo[i].is_int
        && GET_CODE (undobuf.undo[i].old_contents.rtx) == code
        && GET_MODE (undobuf.undo[i].old_contents.rtx) == mode)
      {
        for (j = 0; j < n_args; j++)
          if (XEXP (undobuf.undo[i].old_contents.rtx, j) != args[j])
            break;

        if (j == n_args)
          return undobuf.undo[i].old_contents.rtx;
      }

  /* Otherwise make a new rtx.  We know we have 1, 2, or 3 args.
     Use rtx_alloc instead of gen_rtx because it's faster on RISC.  */
  rt = rtx_alloc (code);
  PUT_MODE (rt, mode);
  XEXP (rt, 0) = args[0];
  if (n_args > 1)
    {
      XEXP (rt, 1) = args[1];
      if (n_args > 2)
        XEXP (rt, 2) = args[2];
    }
  return rt;
}

/* These routines make binary and unary operations by first seeing if they
   fold; if not, a new expression is allocated.  */
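/* For illustration (added commentary, not from the original sources):
   gen_binary (PLUS, SImode, (const_int 2), (const_int 3)) folds to
   (const_int 5), while gen_binary (PLUS, SImode, (reg 65), (const_int 3))
   allocates (plus:SI (reg 65) (const_int 3)) with the constant placed
   canonically last.  */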

static rtx
gen_binary (code, mode, op0, op1)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
{
  rtx result;
  rtx tem;

  if (GET_RTX_CLASS (code) == 'c'
      && (GET_CODE (op0) == CONST_INT
          || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
    tem = op0, op0 = op1, op1 = tem;

  if (GET_RTX_CLASS (code) == '<')
    {
      enum machine_mode op_mode = GET_MODE (op0);
      if (op_mode == VOIDmode)
        op_mode = GET_MODE (op1);
      result = simplify_relational_operation (code, op_mode, op0, op1);
    }
  else
    result = simplify_binary_operation (code, mode, op0, op1);

  if (result)
    return result;

  /* Put complex operands first and constants second.  */
  if (GET_RTX_CLASS (code) == 'c'
      && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
          || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
              && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
          || (GET_CODE (op0) == SUBREG
              && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
              && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
    return gen_rtx_combine (code, mode, op1, op0);

  return gen_rtx_combine (code, mode, op0, op1);
}

static rtx
gen_unary (code, mode, op0)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0;
{
  rtx result = simplify_unary_operation (code, mode, op0, mode);

  if (result)
    return result;

  return gen_rtx_combine (code, mode, op0);
}
\f
/* Simplify a comparison between *POP0 and *POP1 where CODE is the
   comparison code that will be tested.

   The result is a possibly different comparison code to use.  *POP0 and
   *POP1 may be updated.

   It is possible that we might detect that a comparison is either always
   true or always false.  However, we do not perform general constant
   folding in combine, so this knowledge isn't useful.  Such tautologies
   should have been detected earlier.  Hence we ignore all such cases.  */

static enum rtx_code
simplify_comparison (code, pop0, pop1)
     enum rtx_code code;
     rtx *pop0;
     rtx *pop1;
{
  rtx op0 = *pop0;
  rtx op1 = *pop1;
  rtx tem, tem1;
  int i;
  enum machine_mode mode, tmode;

  /* Try a few ways of applying the same transformation to both operands.  */
  while (1)
    {
      /* If both operands are the same constant shift, see if we can ignore
         the shift.  We can if the shift is a rotate or if the bits shifted
         out of this shift are known to be zero for both inputs and if the
         type of comparison is compatible with the shift.  */
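      /* Worked example (added commentary, not from the original sources):
         with op0 = (lshiftrt a 2) and op1 = (lshiftrt b 2), where the low
         two bits of both a and b are known to be zero, an EQ/NE test of
         the shifted values is the same as testing a against b directly,
         so both shifts are dropped.  */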
      if (GET_CODE (op0) == GET_CODE (op1)
          && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
          && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
              || ((GET_CODE (op0) == LSHIFTRT
                   || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
                  && (code != GT && code != LT && code != GE && code != LE))
              || (GET_CODE (op0) == ASHIFTRT
                  && (code != GTU && code != LTU
                      && code != GEU && code != LEU)))
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && INTVAL (XEXP (op0, 1)) >= 0
          && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
          && XEXP (op0, 1) == XEXP (op1, 1))
        {
          enum machine_mode mode = GET_MODE (op0);
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
          int shift_count = INTVAL (XEXP (op0, 1));

          if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
            mask &= (mask >> shift_count) << shift_count;
          else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
            mask = (mask & (mask << shift_count)) >> shift_count;

          if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
              && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
            op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
          else
            break;
        }

      /* If both operands are AND's of a paradoxical SUBREG by constant, the
         SUBREGs are of the same mode, and, in both cases, the AND would
         be redundant if the comparison was done in the narrower mode,
         do the comparison in the narrower mode (e.g., we are AND'ing with 1
         and the operand's possibly nonzero bits are 0xffffff01; in that case
         if we only care about QImode, we don't need the AND).  This case
         occurs if the output mode of an scc insn is not SImode and
         STORE_FLAG_VALUE == 1 (e.g., the 386).  */

      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
               && GET_CODE (XEXP (op0, 1)) == CONST_INT
               && GET_CODE (XEXP (op1, 1)) == CONST_INT
               && GET_CODE (XEXP (op0, 0)) == SUBREG
               && GET_CODE (XEXP (op1, 0)) == SUBREG
               && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
                   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
               && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
                   == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
               && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
                   <= HOST_BITS_PER_WIDE_INT)
               && (nonzero_bits (SUBREG_REG (XEXP (op0, 0)),
                                 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
                   & ~ INTVAL (XEXP (op0, 1))) == 0
               && (nonzero_bits (SUBREG_REG (XEXP (op1, 0)),
                                 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
                   & ~ INTVAL (XEXP (op1, 1))) == 0)
        {
          op0 = SUBREG_REG (XEXP (op0, 0));
          op1 = SUBREG_REG (XEXP (op1, 0));

          /* The resulting comparison is always unsigned since we masked
             off the original sign bit.  */
          code = unsigned_condition (code);
        }
      else
        break;
    }
7974
7975 /* If the first operand is a constant, swap the operands and adjust the
7976 comparison code appropriately. */
7977 if (CONSTANT_P (op0))
7978 {
7979 tem = op0, op0 = op1, op1 = tem;
7980 code = swap_condition (code);
7981 }
7982
7983 /* We now enter a loop during which we will try to simplify the comparison.
7984 For the most part, we only are concerned with comparisons with zero,
7985 but some things may really be comparisons with zero but not start
7986 out looking that way. */
7987
  while (GET_CODE (op1) == CONST_INT)
    {
      enum machine_mode mode = GET_MODE (op0);
      int mode_width = GET_MODE_BITSIZE (mode);
      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
      int equality_comparison_p;
      int sign_bit_comparison_p;
      int unsigned_comparison_p;
      HOST_WIDE_INT const_op;

      /* We only want to handle integral modes.  This catches VOIDmode,
         CCmode, and the floating-point modes.  An exception is that we
         can handle VOIDmode if OP0 is a COMPARE or a comparison
         operation.  */

      if (GET_MODE_CLASS (mode) != MODE_INT
          && ! (mode == VOIDmode
                && (GET_CODE (op0) == COMPARE
                    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
        break;

      /* Get the constant we are comparing against and turn off all bits
         not on in our mode.  */
      const_op = INTVAL (op1);
      if (mode_width <= HOST_BITS_PER_WIDE_INT)
        const_op &= mask;

      /* If we are comparing against a constant power of two and the value
         being compared can only have that single bit nonzero (e.g., it was
         `and'ed with that bit), we can replace this with a comparison
         with zero.  */
      if (const_op
          && (code == EQ || code == NE || code == GE || code == GEU
              || code == LT || code == LTU)
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && exact_log2 (const_op) >= 0
          && nonzero_bits (op0, mode) == const_op)
        {
          code = (code == EQ || code == GE || code == GEU ? NE : EQ);
          op1 = const0_rtx, const_op = 0;
        }
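      /* Example (added commentary, not from the original sources): if op0
         is known to be (and x 8), then (eq op0 8) can only hold when bit 3
         is set, so it becomes (ne op0 0); likewise (ne op0 8) becomes
         (eq op0 0).  */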

      /* Similarly, if we are comparing a value known to be either -1 or
         0 with -1, change it to the opposite comparison against zero.  */

      if (const_op == -1
          && (code == EQ || code == NE || code == GT || code == LE
              || code == GEU || code == LTU)
          && num_sign_bit_copies (op0, mode) == mode_width)
        {
          code = (code == EQ || code == LE || code == GEU ? NE : EQ);
          op1 = const0_rtx, const_op = 0;
        }

      /* Do some canonicalizations based on the comparison code.  We prefer
         comparisons against zero and then prefer equality comparisons.
         If we can reduce the size of a constant, we will do that too.  */

      switch (code)
        {
        case LT:
          /* < C is equivalent to <= (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = LE;
              /* ... fall through to LE case below.  */
            }
          else
            break;

        case LE:
          /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
          if (const_op < 0)
            {
              const_op += 1;
              op1 = GEN_INT (const_op);
              code = LT;
            }

          /* If we are doing a <= 0 comparison on a value known to have
             a zero sign bit, we can replace this with == 0.  */
          else if (const_op == 0
                   && mode_width <= HOST_BITS_PER_WIDE_INT
                   && (nonzero_bits (op0, mode)
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
            code = EQ;
          break;

        case GE:
          /* >= C is equivalent to > (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = GT;
              /* ... fall through to GT below.  */
            }
          else
            break;

        case GT:
          /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
          if (const_op < 0)
            {
              const_op += 1;
              op1 = GEN_INT (const_op);
              code = GE;
            }

          /* If we are doing a > 0 comparison on a value known to have
             a zero sign bit, we can replace this with != 0.  */
          else if (const_op == 0
                   && mode_width <= HOST_BITS_PER_WIDE_INT
                   && (nonzero_bits (op0, mode)
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
            code = NE;
          break;

        case LTU:
          /* unsigned < C is equivalent to <= (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = LEU;
              /* ... fall through ... */
            }

          /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
          else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
            {
              const_op = 0, op1 = const0_rtx;
              code = GE;
              break;
            }
          else
            break;

        case LEU:
          /* unsigned <= 0 is equivalent to == 0  */
          if (const_op == 0)
            code = EQ;

          /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
          else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
            {
              const_op = 0, op1 = const0_rtx;
              code = GE;
            }
          break;

        case GEU:
          /* unsigned >= C is equivalent to > (C - 1).  */
          if (const_op > 1)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = GTU;
              /* ... fall through ... */
            }

          /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
          else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
            {
              const_op = 0, op1 = const0_rtx;
              code = LT;
            }
          else
            break;

        case GTU:
          /* unsigned > 0 is equivalent to != 0  */
          if (const_op == 0)
            code = NE;

          /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
          else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
            {
              const_op = 0, op1 = const0_rtx;
              code = LT;
            }
          break;
        }
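      /* Worked example (added commentary, not from the original sources):
         in 32-bit SImode, (gtu x 0x7fffffff) holds exactly when the sign
         bit of x is set, so it canonicalizes to the cheaper signed test
         (lt x 0).  */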

      /* Compute some predicates to simplify code below.  */

      equality_comparison_p = (code == EQ || code == NE);
      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
                               || code == GEU);

      /* Now try cases based on the opcode of OP0.  If none of the cases
         does a "continue", we exit this loop immediately after the
         switch.  */

      switch (GET_CODE (op0))
        {
        case ZERO_EXTRACT:
          /* If we are extracting a single bit from a variable position in
             a constant that has only a single bit set and are comparing it
             with zero, we can convert this into an equality comparison
             between the position and the location of the single bit.  We
             can't do this if bits are big-endian and we don't have an extzv,
             since we then can't know what mode to use for the endianness
             adjustment.  */

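          /* Example (added commentary, not from the original sources;
             ignoring the BITS_BIG_ENDIAN adjustment below):
             (eq (zero_extract (const_int 8) (const_int 1) x) (const_int 0))
             extracts bit x of the constant 8 = 1<<3; that bit is zero
             exactly when x != 3, so the test becomes (ne x (const_int 3)).  */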
#if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
          if (GET_CODE (XEXP (op0, 0)) == CONST_INT
              && XEXP (op0, 1) == const1_rtx
              && equality_comparison_p && const_op == 0
              && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
            {
#if BITS_BIG_ENDIAN
              i = (GET_MODE_BITSIZE
                   (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
#endif

              op0 = XEXP (op0, 2);
              op1 = GEN_INT (i);
              const_op = i;

              /* Result is nonzero iff shift count is equal to I.  */
              code = reverse_condition (code);
              continue;
            }
#endif

          /* ... fall through ... */

        case SIGN_EXTRACT:
          tem = expand_compound_operation (op0);
          if (tem != op0)
            {
              op0 = tem;
              continue;
            }
          break;

        case NOT:
          /* If testing for equality, we can take the NOT of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If just looking at the sign bit, reverse the sense of the
             comparison.  */
          if (sign_bit_comparison_p)
            {
              op0 = XEXP (op0, 0);
              code = (code == GE ? LT : GE);
              continue;
            }
          break;

        case NEG:
          /* If testing for equality, we can take the NEG of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* The remaining cases only apply to comparisons with zero.  */
          if (const_op != 0)
            break;

          /* When X is ABS or is known positive,
             (neg X) is < 0 if and only if X != 0.  */

          if (sign_bit_comparison_p
              && (GET_CODE (XEXP (op0, 0)) == ABS
                  || (mode_width <= HOST_BITS_PER_WIDE_INT
                      && (nonzero_bits (XEXP (op0, 0), mode)
                          & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
            {
              op0 = XEXP (op0, 0);
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* If we have NEG of something whose two high-order bits are the
             same, we know that "(-a) < 0" is equivalent to "a > 0".  */
          if (num_sign_bit_copies (op0, mode) >= 2)
            {
              op0 = XEXP (op0, 0);
              code = swap_condition (code);
              continue;
            }
          break;

        case ROTATE:
          /* If we are testing equality and our count is a constant, we
             can perform the inverse operation on our RHS.  */
          if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && (tem = simplify_binary_operation (ROTATERT, mode,
                                                   op1, XEXP (op0, 1))) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If we are doing a < 0 or >= 0 comparison, it means we are testing
             a particular bit.  Convert it to an AND of a constant of that
             bit.  This will be converted into a ZERO_EXTRACT.  */
          if (const_op == 0 && sign_bit_comparison_p
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            ((HOST_WIDE_INT) 1
                                             << (mode_width - 1
                                                 - INTVAL (XEXP (op0, 1)))));
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* ... fall through ... */

        case ABS:
          /* ABS is ignorable inside an equality comparison with zero.  */
          if (const_op == 0 && equality_comparison_p)
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case SIGN_EXTEND:
          /* Can simplify (compare (zero/sign_extend FOO) CONST)
             to (compare FOO CONST) if CONST fits in FOO's mode and we
             are either testing inequality or have an unsigned comparison
             with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
          if (! unsigned_comparison_p
              && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
                  <= HOST_BITS_PER_WIDE_INT)
              && ((unsigned HOST_WIDE_INT) const_op
                  < (((HOST_WIDE_INT) 1
                      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case SUBREG:
          /* Check for the case where we are comparing A - C1 with C2,
             both constants are smaller than 1/2 the maximum positive
             value in MODE, and the comparison is equality or unsigned.
             In that case, if A is either zero-extended to MODE or has
             sufficient sign bits so that the high-order bit in MODE
             is a copy of the sign in the inner mode, we can prove that it is
             safe to do the operation in the wider mode.  This simplifies
             many range checks.  */

          if (mode_width <= HOST_BITS_PER_WIDE_INT
              && subreg_lowpart_p (op0)
              && GET_CODE (SUBREG_REG (op0)) == PLUS
              && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
              && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
              && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
                  < GET_MODE_MASK (mode) / 2)
              && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
              && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
                                      GET_MODE (SUBREG_REG (op0)))
                        & ~ GET_MODE_MASK (mode))
                  || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
                                           GET_MODE (SUBREG_REG (op0)))
                      > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
                         - GET_MODE_BITSIZE (mode)))))
            {
              op0 = SUBREG_REG (op0);
              continue;
            }

          /* If the inner mode is narrower and we are extracting the low part,
             we can treat the SUBREG as if it were a ZERO_EXTEND.  */
          if (subreg_lowpart_p (op0)
              && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
            /* Fall through */ ;
          else
            break;

          /* ... fall through ... */

        case ZERO_EXTEND:
          if ((unsigned_comparison_p || equality_comparison_p)
              && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
                  <= HOST_BITS_PER_WIDE_INT)
              && ((unsigned HOST_WIDE_INT) const_op
                  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case PLUS:
          /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
             this for equality comparisons due to pathological cases involving
             overflows.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
                                                        op1, XEXP (op0, 1))))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
          if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
              && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
            {
              op0 = XEXP (XEXP (op0, 0), 0);
              code = (code == LT ? EQ : NE);
              continue;
            }
          break;

        case MINUS:
          /* (eq (minus A B) C) -> (eq A (plus B C)) or
             (eq B (minus A C)), whichever simplifies.  We can only do
             this for equality comparisons due to pathological cases involving
             overflows.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (PLUS, mode,
                                                        XEXP (op0, 1), op1)))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
                                                        XEXP (op0, 0), op1)))
            {
              op0 = XEXP (op0, 1);
              op1 = tem;
              continue;
            }

          /* The sign bit of (minus (ashiftrt X C) X), where C is the number
             of bits in X minus 1, is one iff X > 0.  */
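          /* Added commentary (not from the original sources): with C = 31
             in SImode, (ashiftrt X 31) is 0 when X >= 0 and -1 when X < 0;
             subtracting X makes the result negative exactly when X > 0,
             which justifies rewriting (lt ... 0) as (gt X 0) and
             (ge ... 0) as (le X 0) below.  */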
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
              && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
              && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
            {
              op0 = XEXP (op0, 1);
              code = (code == GE ? LE : GT);
              continue;
            }
          break;

        case XOR:
          /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
             if C is zero or B is a constant.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (XOR, mode,
                                                        XEXP (op0, 1), op1)))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }
          break;

        case EQ:  case NE:
        case LT:  case LTU:  case LE:  case LEU:
        case GT:  case GTU:  case GE:  case GEU:
          /* We can't do anything if OP0 is a condition code value, rather
             than an actual data value.  */
          if (const_op != 0
#ifdef HAVE_cc0
              || XEXP (op0, 0) == cc0_rtx
#endif
              || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
            break;

          /* Get the two operands being compared.  */
          if (GET_CODE (XEXP (op0, 0)) == COMPARE)
            tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
          else
            tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);

          /* Check for the cases where we simply want the result of the
             earlier test or the opposite of that result.  */
          if (code == NE
              || (code == EQ && reversible_comparison_p (op0))
              || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
                  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                  && (STORE_FLAG_VALUE
                      & (((HOST_WIDE_INT) 1
                          << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
                  && (code == LT
                      || (code == GE && reversible_comparison_p (op0)))))
            {
              code = (code == LT || code == NE
                      ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
              op0 = tem, op1 = tem1;
              continue;
            }
          break;

        case IOR:
          /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
             iff X <= 0.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
              && XEXP (XEXP (op0, 0), 1) == constm1_rtx
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
            {
              op0 = XEXP (op0, 1);
              code = (code == GE ? GT : LE);
              continue;
            }
          break;

        case AND:
          /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
             will be converted to a ZERO_EXTRACT later.  */
          if (const_op == 0 && equality_comparison_p
              && (GET_CODE (XEXP (op0, 0)) == ASHIFT
                  || GET_CODE (XEXP (op0, 0)) == LSHIFT)
              && XEXP (XEXP (op0, 0), 0) == const1_rtx)
            {
              op0 = simplify_and_const_int
                (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
                                             XEXP (op0, 1),
                                             XEXP (XEXP (op0, 0), 1)),
                 (HOST_WIDE_INT) 1);
              continue;
            }

          /* If we are comparing (and (lshiftrt X C1) C2) for equality with
             zero and X is a comparison and C1 and C2 describe only bits set
             in STORE_FLAG_VALUE, we can compare with X.  */
          if (const_op == 0 && equality_comparison_p
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
              && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
              && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
              && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
            {
              mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
                      << INTVAL (XEXP (XEXP (op0, 0), 1)));
              if ((~ STORE_FLAG_VALUE & mask) == 0
                  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
                      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
                          && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
                {
                  op0 = XEXP (XEXP (op0, 0), 0);
                  continue;
                }
            }

          /* If we are doing an equality comparison of an AND of a bit equal
             to the sign bit, replace this with a LT or GE comparison of
             the underlying value.  */
          if (equality_comparison_p
              && const_op == 0
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
                  == (HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              op0 = XEXP (op0, 0);
              code = (code == EQ ? GE : LT);
              continue;
            }

          /* If this AND operation is really a ZERO_EXTEND from a narrower
             mode, the constant fits within that mode, and this is either an
             equality or unsigned comparison, try to do this comparison in
             the narrower mode.  */
          if ((equality_comparison_p || unsigned_comparison_p)
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
                                   & GET_MODE_MASK (mode))
                                  + 1)) >= 0
              && const_op >> i == 0
              && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
            {
              op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
              continue;
            }
          break;
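          /* Example (added commentary, not from the original sources):
             (leu (and:SI x (const_int 255)) (const_int 10)) is really a
             QImode test, so once 10 is seen to fit in QImode it can proceed
             as (leu (subreg:QI x 0) (const_int 10)).  */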

        case ASHIFT:
        case LSHIFT:
          /* If we have (compare (xshift FOO N) (const_int C)) and
             the high order N bits of FOO (N+1 if an inequality comparison)
             are known to be zero, we can do this by comparing FOO with C
             shifted right N bits so long as the low-order N bits of C are
             zero.  */
          if (GET_CODE (XEXP (op0, 1)) == CONST_INT
              && INTVAL (XEXP (op0, 1)) >= 0
              && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
                  < HOST_BITS_PER_WIDE_INT)
              && ((const_op
                   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && (nonzero_bits (XEXP (op0, 0), mode)
                  & ~ (mask >> (INTVAL (XEXP (op0, 1))
                                + ! equality_comparison_p))) == 0)
            {
              const_op >>= INTVAL (XEXP (op0, 1));
              op1 = GEN_INT (const_op);
              op0 = XEXP (op0, 0);
              continue;
            }

          /* If we are doing a sign bit comparison, it means we are testing
             a particular bit.  Convert it to the appropriate AND.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            ((HOST_WIDE_INT) 1
                                             << (mode_width - 1
                                                 - INTVAL (XEXP (op0, 1)))));
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* If this is an equality comparison with zero and we are shifting
             the low bit to the sign bit, we can convert this to an AND of the
             low-order bit.  */
          if (const_op == 0 && equality_comparison_p
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && INTVAL (XEXP (op0, 1)) == mode_width - 1)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            (HOST_WIDE_INT) 1);
              continue;
            }
          break;

        case ASHIFTRT:
          /* If this is an equality comparison with zero, we can do this
             as a logical shift, which might be much simpler.  */
          if (equality_comparison_p && const_op == 0
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            {
              op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
                                          XEXP (op0, 0),
                                          INTVAL (XEXP (op0, 1)));
              continue;
            }

          /* If OP0 is a sign extension and CODE is not an unsigned comparison,
             do the comparison in a narrower mode.  */
          if (! unsigned_comparison_p
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
              && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
                                         MODE_INT, 1)) != BLKmode
              && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
                  || ((unsigned HOST_WIDE_INT) - const_op
                      <= GET_MODE_MASK (tmode))))
            {
              op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
              continue;
            }

          /* ... fall through ... */
        case LSHIFTRT:
          /* If we have (compare (xshiftrt FOO N) (const_int C)) and
             the low order N bits of FOO are known to be zero, we can do this
             by comparing FOO with C shifted left N bits so long as no
             overflow occurs.  */
          if (GET_CODE (XEXP (op0, 1)) == CONST_INT
              && INTVAL (XEXP (op0, 1)) >= 0
              && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && (nonzero_bits (XEXP (op0, 0), mode)
                  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
              && (const_op == 0
                  || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
                      < mode_width)))
            {
              const_op <<= INTVAL (XEXP (op0, 1));
              op1 = GEN_INT (const_op);
              op0 = XEXP (op0, 0);
              continue;
            }

          /* If we are using this shift to extract just the sign bit, we
             can replace this with an LT or GE comparison.  */
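          /* Example (added commentary, not from the original sources): in
             SImode, (ne (lshiftrt x 31) 0) isolates the sign bit, so it is
             rewritten as (lt x 0).  */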
          if (const_op == 0
              && (equality_comparison_p || sign_bit_comparison_p)
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && INTVAL (XEXP (op0, 1)) == mode_width - 1)
            {
              op0 = XEXP (op0, 0);
              code = (code == NE || code == GT ? LT : GE);
              continue;
            }
          break;
        }

      break;
    }

  /* Now make any compound operations involved in this comparison.  Then,
     check for an outermost SUBREG on OP0 that isn't doing anything or is
     paradoxical.  The latter case can only occur when it is known that the
     "extra" bits will be zero.  Therefore, it is safe to remove the SUBREG.
     We can never remove a SUBREG for a non-equality comparison because the
     sign bit is in a different place in the underlying object.  */

  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
  op1 = make_compound_operation (op1, SET);

  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
      && (code == NE || code == EQ)
      && ((GET_MODE_SIZE (GET_MODE (op0))
           > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
    {
      op0 = SUBREG_REG (op0);
      op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
    }

  else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
           && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
           && (code == NE || code == EQ)
           && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
               <= HOST_BITS_PER_WIDE_INT)
           && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
               & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
           && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
                                              op1),
               (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
                & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
    op0 = SUBREG_REG (op0), op1 = tem;

  /* We now do the opposite procedure: Some machines don't have compare
     insns in all modes.  If OP0's mode is an integer mode smaller than a
     word and we can't do a compare in that mode, see if there is a larger
     mode for which we can do the compare.  There are a number of cases in
     which we can use the wider mode.  */

  mode = GET_MODE (op0);
  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
      && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    for (tmode = GET_MODE_WIDER_MODE (mode);
         (tmode != VOIDmode
          && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
         tmode = GET_MODE_WIDER_MODE (tmode))
      if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
        {
          /* If the only nonzero bits in OP0 and OP1 are those in the
             narrower mode and this is an equality or unsigned comparison,
             we can use the wider mode.  Similarly for sign-extended
             values and equality or signed comparisons.  */
          if (((code == EQ || code == NE
                || code == GEU || code == GTU || code == LEU || code == LTU)
               && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
               && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
              || ((code == EQ || code == NE
                   || code == GE || code == GT || code == LE || code == LT)
                  && (num_sign_bit_copies (op0, tmode)
                      > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
                  && (num_sign_bit_copies (op1, tmode)
                      > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
            {
              op0 = gen_lowpart_for_combine (tmode, op0);
              op1 = gen_lowpart_for_combine (tmode, op1);
              break;
            }

          /* If this is a test for negative, we can make an explicit
             test of the sign bit.  */

          if (op1 == const0_rtx && (code == LT || code == GE)
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = gen_binary (AND, tmode,
                                gen_lowpart_for_combine (tmode, op0),
                                GEN_INT ((HOST_WIDE_INT) 1
                                         << (GET_MODE_BITSIZE (mode) - 1)));
              code = (code == LT) ? NE : EQ;
              break;
            }
        }

  *pop0 = op0;
  *pop1 = op1;

  return code;
}
\f
/* Return 1 if we know that X, a comparison operation, is not operating
   on a floating-point value or is EQ or NE, meaning that we can safely
   reverse it.  */
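/* Added commentary (not from the original sources): under IEEE arithmetic,
   reversing (lt x y) to (ge x y) is wrong when either operand is a NaN,
   since both orderings are then false; EQ and NE are safe because they are
   each other's exact complement even for NaNs, which is why only EQ/NE and
   integer comparisons are considered reversible below.  */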

static int
reversible_comparison_p (x)
     rtx x;
{
  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      || GET_CODE (x) == NE || GET_CODE (x) == EQ)
    return 1;

  switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
    {
    case MODE_INT:
      return 1;

    case MODE_CC:
      x = get_last_value (XEXP (x, 0));
      return (x && GET_CODE (x) == COMPARE
              && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
    }

  return 0;
}
\f
/* Utility function for following routine.  Called when X is part of a value
   being stored into reg_last_set_value.  Sets reg_last_set_table_tick
   for each register mentioned.  Similar to mention_regs in cse.c.  */

static void
update_table_tick (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  register char *fmt = GET_RTX_FORMAT (code);
  register int i;

  if (code == REG)
    {
      int regno = REGNO (x);
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                              ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

      for (i = regno; i < endregno; i++)
        reg_last_set_table_tick[i] = label_tick;

      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    /* Note that we can't have an "E" in values stored; see
       get_last_value_validate.  */
    if (fmt[i] == 'e')
      update_table_tick (XEXP (x, i));
}

/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
   are saying that the register is clobbered and we no longer know its
   value.  If INSN is zero, don't update reg_last_set; this is only permitted
   with VALUE also zero and is used to invalidate the register.  */

static void
record_value_for_reg (reg, insn, value)
     rtx reg;
     rtx insn;
     rtx value;
{
  int regno = REGNO (reg);
  int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
  int i;

  /* If VALUE contains REG and we have a previous value for REG, substitute
     the previous value.  */
  if (value && insn && reg_overlap_mentioned_p (reg, value))
    {
      rtx tem;

      /* Set things up so get_last_value is allowed to see anything set up to
         our insn.  */
      subst_low_cuid = INSN_CUID (insn);
      tem = get_last_value (reg);

      if (tem)
        value = replace_rtx (copy_rtx (value), reg, tem);
    }

  /* For each register modified, show we don't know its value, that
     its value has been updated, and that we don't know the location of
     the death of the register.  */
  for (i = regno; i < endregno; i++)
    {
      if (insn)
        reg_last_set[i] = insn;
      reg_last_set_value[i] = 0;
      reg_last_death[i] = 0;
    }

  /* Mark registers that are being referenced in this value.  */
  if (value)
    update_table_tick (value);

  /* Now update the status of each register being set.
     If someone is using this register in this block, set this register
     to invalid since we will get confused between the two lives in this
     basic block.  This makes using this register always invalid.  In cse, we
     scan the table to invalidate all entries using this register, but this
     is too much work for us.  */

  for (i = regno; i < endregno; i++)
    {
      reg_last_set_label[i] = label_tick;
      if (value && reg_last_set_table_tick[i] == label_tick)
        reg_last_set_invalid[i] = 1;
      else
        reg_last_set_invalid[i] = 0;
    }

  /* The value being assigned might refer to X (like in "x++;").  In that
     case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
  if (value && ! get_last_value_validate (&value,
                                          reg_last_set_label[regno], 0))
    {
      value = copy_rtx (value);
      if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
        value = 0;
    }
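
  /* Example (added commentary, not from the original sources): for "x++",
     VALUE is (plus (reg x) (const_int 1)), which mentions the register
     being set; if no earlier value can be substituted, validation replaces
     the self-reference with (clobber (const_int 0)) so later uses cannot
     chase the value in a circle.  */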

  /* For the main register being modified, update the value, the mode, the
     nonzero bits, and the number of sign bit copies.  */

  reg_last_set_value[regno] = value;

  if (value)
    {
      subst_low_cuid = INSN_CUID (insn);
      reg_last_set_mode[regno] = GET_MODE (reg);
      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
      reg_last_set_sign_bit_copies[regno]
        = num_sign_bit_copies (value, GET_MODE (reg));
    }
}

/* Used for communication between the following two routines.  */
static rtx record_dead_insn;

/* Called via note_stores from record_dead_and_set_regs to handle one
   SET or CLOBBER in an insn.  */

static void
record_dead_and_set_regs_1 (dest, setter)
     rtx dest, setter;
{
  if (GET_CODE (dest) == REG)
    {
      /* If we are setting the whole register, we know its value.  Otherwise
         show that we don't know the value.  We can handle SUBREG in
         some cases.  */
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
        record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
      else if (GET_CODE (setter) == SET
               && GET_CODE (SET_DEST (setter)) == SUBREG
               && SUBREG_REG (SET_DEST (setter)) == dest
               && subreg_lowpart_p (SET_DEST (setter)))
        record_value_for_reg (dest, record_dead_insn,
                              gen_lowpart_for_combine (GET_MODE (dest),
                                                       SET_SRC (setter)));
      else
        record_value_for_reg (dest, record_dead_insn, NULL_RTX);
    }
  else if (GET_CODE (dest) == MEM
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    mem_last_set = INSN_CUID (record_dead_insn);
}

/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_last_set, reg_last_set_value, reg_last_death, and also the
   similar information mem_last_set (which insn most recently modified memory)
   and last_call_cuid (which insn was the most recent subroutine call).  */

static void
record_dead_and_set_regs (insn)
     rtx insn;
{
  register rtx link;
  int i;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD
          && GET_CODE (XEXP (link, 0)) == REG)
        {
          int regno = REGNO (XEXP (link, 0));
          int endregno
            = regno + (regno < FIRST_PSEUDO_REGISTER
                       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
                       : 1);

          for (i = regno; i < endregno; i++)
            reg_last_death[i] = insn;
        }
      else if (REG_NOTE_KIND (link) == REG_INC)
        record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
    }

  if (GET_CODE (insn) == CALL_INSN)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (call_used_regs[i])
          {
            reg_last_set_value[i] = 0;
            reg_last_death[i] = 0;
          }

      last_call_cuid = mem_last_set = INSN_CUID (insn);
    }

  record_dead_insn = insn;
  note_stores (PATTERN (insn), record_dead_and_set_regs_1);
}
\f
/* Utility routine for the following function.  Verify that all the registers
   mentioned in *LOC are valid when *LOC was part of a value set when
   label_tick == TICK.  Return 0 if some are not.

   If REPLACE is non-zero, replace the invalid reference with
   (clobber (const_int 0)) and return 1.  This replacement is useful because
   we often can get useful information about the form of a value (e.g., if
   it was produced by a shift that always produces -1 or 0) even though
   we don't know exactly what registers it was produced from.  */

static int
get_last_value_validate (loc, tick, replace)
     rtx *loc;
     int tick;
     int replace;
{
  rtx x = *loc;
  char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                              ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
      int j;

      for (j = regno; j < endregno; j++)
        if (reg_last_set_invalid[j]
            /* If this is a pseudo-register that was only set once, it is
               always valid.  */
            || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
                && reg_last_set_label[j] > tick))
          {
            if (replace)
              *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
            return replace;
          }

      return 1;
    }

  for (i = 0; i < len; i++)
    if ((fmt[i] == 'e'
         && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
        /* Don't bother with these.  They shouldn't occur anyway.  */
        || fmt[i] == 'E')
      return 0;

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}

/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (x)
     rtx x;
{
  int regno;
  rtx value;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
          <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart_for_combine (GET_MODE (x), value);

  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);
  value = reg_last_set_value[regno];

  /* If we don't have a value or if it isn't for this basic block,
     return 0.  */

  if (value == 0
      || (reg_n_sets[regno] != 1
          && reg_last_set_label[regno] != label_tick))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once, but make a quick
     check to see if the previous insn set it to something.  This is commonly
     the case when the same pseudo is used by repeated insns.  */

  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
    {
      rtx insn, set;

      for (insn = prev_nonnote_insn (subst_insn);
           insn && INSN_CUID (insn) >= subst_low_cuid;
           insn = prev_nonnote_insn (insn))
        ;

      if (insn
          && (set = single_set (insn)) != 0
          && rtx_equal_p (SET_DEST (set), x))
        {
          value = SET_SRC (set);

          /* Make sure that VALUE doesn't reference X.  Replace any
             explicit references with a CLOBBER.  If there are any remaining
             references (rare), don't use the value.  */

          if (reg_mentioned_p (x, value))
            value = replace_rtx (copy_rtx (value), x,
                                 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));

          if (reg_overlap_mentioned_p (x, value))
            return 0;
        }
      else
        return 0;
    }

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
    return value;

  return 0;
}
\f
/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_CUID.  */

static int
use_crosses_set_p (x, from_cuid)
     register rtx x;
     int from_cuid;
{
  register char *fmt;
  register int i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
#ifdef PUSH_ROUNDING
      /* Don't allow uses of the stack pointer to be moved,
         because we don't know whether the move crosses a push insn.  */
      if (regno == STACK_POINTER_REGNUM)
        return 1;
#endif
      return (reg_last_set[regno]
              && INSN_CUID (reg_last_set[regno]) > from_cuid);
    }

  if (code == MEM && mem_last_set > from_cuid)
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
              return 1;
        }
      else if (fmt[i] == 'e'
               && use_crosses_set_p (XEXP (x, i), from_cuid))
        return 1;
    }
  return 0;
}
\f
/* Define three variables used for communication between the following
   routines.  */

static int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;

/* Function called via note_stores from reg_dead_at_p.

   If DEST is within [reg_dead_regno, reg_dead_endregno), set
   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */

static void
reg_dead_at_p_1 (dest, x)
     rtx dest;
     rtx x;
{
  int regno, endregno;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);
  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);

  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
}

/* Return non-zero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  */

static int
reg_dead_at_p (reg, insn)
     rtx reg;
     rtx insn;
{
  int block, i;

  /* Set variables for reg_dead_at_p_1.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
                                        ? HARD_REGNO_NREGS (reg_dead_regno,
                                                            GET_MODE (reg))
                                        : 1);

  reg_dead_flag = 0;

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  */
  for (; insn && GET_CODE (insn) != CODE_LABEL;
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1);
      if (reg_dead_flag)
        return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
        return 1;
    }

  /* Get the basic block number that we were in.  */
  if (insn == 0)
    block = 0;
  else
    {
      for (block = 0; block < n_basic_blocks; block++)
        if (insn == basic_block_head[block])
          break;

      if (block == n_basic_blocks)
        return 0;
    }

  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
        & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
      return 0;

  return 1;
}
\f
/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (regno, insn)
     int regno;
     rtx insn;
{
  register rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    {
      reg_n_deaths[regno]--;
      remove_note (insn, note);
    }

  return note;
}

/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */
9316
static void
move_deaths (x, from_cuid, to_insn, pnotes)
     rtx x;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];

      if (where_dead && INSN_CUID (where_dead) >= from_cuid
	  && INSN_CUID (where_dead) < INSN_CUID (to_insn))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  != GET_MODE_SIZE (GET_MODE (x))))
	    {
	      int deadregno = REGNO (XEXP (note, 0));
	      int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;

	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx (EXPR_LIST, REG_DEAD,
			       gen_rtx (REG, word_mode, i),
			       REG_NOTES (where_dead));
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);

	  reg_n_deaths[regno]++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some piece of
	 every register in the expression is used by this insn, so
	 remove any old death.  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
    }
}
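
/* Hedged usage sketch (editorial; the variable names and the assumption
   that I2 is a single SET are mine, not quotes from try_combine): when
   I2's SET_SRC has been substituted into the pattern that will become
   I3, the death notes that used to sit between the two insns are
   collected here and then redistributed.  */
#if 0
  rtx midnotes = 0;

  move_deaths (SET_SRC (PATTERN (i2)), INSN_CUID (i2), i3, &midnotes);
  distribute_notes (midnotes, NULL_RTX, i3, NULL_RTX, elim_i2, elim_i1);
#endif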
\f
/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (x, body)
     rtx x;
     rtx body;
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
	target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
	target = SUBREG_REG (XEXP (dest, 0));
      else
	return 0;

      if (GET_CODE (target) == SUBREG)
	target = SUBREG_REG (target);

      if (GET_CODE (target) != REG)
	return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
	return target == x;

      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
	return 1;

  return 0;
}
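
/* For instance (an editorial example; the register numbers are made
   up), with BODY of the form
	(set (zero_extract:SI (reg:SI 65) (const_int 8) (const_int 0))
	     (reg:SI 66))
   reg_bitfield_target_p returns 1 when X is (reg:SI 65): only part of
   the register is written, so a plain dead-or-set test on this SET
   would misjudge the liveness of the rest of (reg 65).  */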
\f
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from combining the
   insns that included FROM_INSN (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_UNUSED:
	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (! (GET_CODE (XEXP (note, 0)) == REG
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (GET_CODE (XEXP (note, 0)) == REG
			 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;
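
	  /* Editorial example (hypothetical pseudo): if the note says
	     "(reg 80) is set but unused" and the combined I3 still sets
	     (reg 80) without reading it, the note simply moves to I3;
	     if instead I3 now reads (reg 80) as an input, its last use
	     is inside I3, so the note was flipped to REG_DEAD above.  */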

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NONNEG:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3 in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	case REG_LABEL:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_WAS_0:
	  /* It is too much trouble to try to see if this note is still
	     correct in all situations.  It is better to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

	  if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
	    break;

	  /* If the register is used in both I2 and I3 and it dies in I3,
	     we might have added another reference to it.  If reg_n_refs
	     was 2, bump it to 3.  This has to be correct since the
	     register must have been set somewhere.  The reason this is
	     done is because local-alloc.c treats 2 references as a
	     special case.  */

	  if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
	      && reg_n_refs[REGNO (XEXP (note, 0))] == 2
	      && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    reg_n_refs[REGNO (XEXP (note, 0))] = 3;

	  if (place == 0)
	    for (tem = prev_nonnote_insn (i3);
		 tem && (GET_CODE (tem) == INSN
			 || GET_CODE (tem) == CALL_INSN);
		 tem = prev_nonnote_insn (tem))
	      {
		/* If the register is being set at TEM, see if that is all
		   TEM is doing.  If so, delete TEM.  Otherwise, make this
		   into a REG_UNUSED note instead.  */
		if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		  {
		    rtx set = single_set (tem);

		    /* Verify that it was the set, and not a clobber that
		       modified the register.  */

		    if (set != 0 && ! side_effects_p (SET_SRC (set))
			&& rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
		      {
			/* Move the notes and links of TEM elsewhere.
			   This might delete other dead insns recursively.
			   First set the pattern to something that won't use
			   any register.  */

			PATTERN (tem) = pc_rtx;

			distribute_notes (REG_NOTES (tem), tem, tem,
					  NULL_RTX, NULL_RTX, NULL_RTX);
			distribute_links (LOG_LINKS (tem));

			PUT_CODE (tem, NOTE);
			NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			NOTE_SOURCE_FILE (tem) = 0;
		      }
		    else
		      {
			PUT_REG_NOTE_KIND (note, REG_UNUSED);

			/* If there isn't already a REG_UNUSED note, put one
			   here.  */
			if (! find_regno_note (tem, REG_UNUSED,
					       REGNO (XEXP (note, 0))))
			  place = tem;
			break;
		      }
		  }
		else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
		  {
		    place = tem;
		    break;
		  }
	      }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.

	     Note that we cannot use just `dead_or_set_p' here since we can
	     convert an assignment to a register into a bit-field assignment.
	     Therefore, we must also omit the note if the register is the
	     target of a bit-field assignment.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      int regno = REGNO (XEXP (note, 0));

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.]  */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must add a USE for that piece before
		 PLACE and put the appropriate REG_DEAD note on it.

		 An alternative would be to put a REG_UNUSED for the pieces
		 on the insn that set the register, but that can't be done if
		 it is not in the same block.  It is simpler, though less
		 efficient, to add the USE insns.  */

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  int i;

		  for (i = regno; i < endregno; i++)
		    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
		      {
			rtx piece = gen_rtx (REG, word_mode, i);
			rtx p;

			/* See if we already emitted a USE insn for this
			   register in front of PLACE.  */
			for (p = place;
			     GET_CODE (PREV_INSN (p)) == INSN
			     && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
			     p = PREV_INSN (p))
			  if (rtx_equal_p (piece,
					   XEXP (PATTERN (PREV_INSN (p)), 0)))
			    {
			      p = 0;
			      break;
			    }

			if (p)
			  {
			    rtx use_insn
			      = emit_insn_before (gen_rtx (USE, VOIDmode,
							   piece),
						  p);
			    REG_NOTES (use_insn)
			      = gen_rtx (EXPR_LIST, REG_DEAD, piece,
					 REG_NOTES (use_insn));
			  }

			all_used = 0;
		      }

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 still used and that are not already dead or set.  */

		      for (i = regno; i < endregno; i++)
			{
			  rtx piece = gen_rtx (REG, word_mode, i);

			  if (reg_referenced_p (piece, PATTERN (place))
			      && ! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
							 piece,
							 REG_NOTES (place));
			}

		      place = 0;
		    }
		}
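
	      /* Editorial example (hypothetical hard regs, 32-bit
		 words): if (reg:DI 2) dies here but PLACE only reads
		 (reg:SI 2), the loop above emits "(use (reg:SI 3))"
		 before PLACE with its own REG_DEAD note, so the death
		 of the unused high word is still recorded.  */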
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	reg_n_deaths[REGNO (XEXP (note, 0))]--;

      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    reg_n_deaths[REGNO (XEXP (note, 0))]++;

	  REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
					XEXP (note, 0), REG_NOTES (place2));
	}
    }
}
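
/* Hedged usage sketch (editorial; the variable names are assumptions,
   not quotes from try_combine): after a combination succeeds, the note
   chains saved from each dead insn are handed off one at a time.  */
#if 0
  if (i3notes)
    distribute_notes (i3notes, i3, i3, NULL_RTX, elim_i2, elim_i1);
  if (i2notes)
    distribute_notes (i2notes, i2, i3, NULL_RTX, elim_i2, elim_i1);
#endif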
\f
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link to
	 have been changed by combine.  The only way that could happen is if
	 we replace I3, I2, and I1 by I3 and I2, but in that case the
	 destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && GET_CODE (insn) != CODE_LABEL
	    && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	    && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;
	    }
	}
    }
}
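
/* Hedged companion sketch (editorial; the names are assumptions): the
   links saved from the combined insns are rethreaded the same way the
   notes were, one chain per dead insn.  */
#if 0
  distribute_links (i3links);
  distribute_links (i2links);
  distribute_links (i1links);
#endif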
\f
void
dump_combine_stats (file)
     FILE *file;
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}
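
/* Editorial note: these routines write into the combine RTL dump
   (historically requested with -dc, if I recall the option letter
   correctly).  The output looks like the following; the figures are
   made-up placeholders, only the format comes from the fprintf
   strings above.

   ;; Combiner statistics: 420 attempts, 97 substitutions (12 requiring new space),
   ;; 85 successes.  */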