]> gcc.gnu.org Git - gcc.git/blame - gcc/combine.c
(set_nonzero_bits_and_sign_copies): Don't record data for a a pseudo that is undefine...
[gcc.git] / gcc / combine.c
CommitLineData
230d793d 1/* Optimize by combining instructions for GNU compiler.
dc3e17ad 2 Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc.
230d793d
RS
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21/* This module is essentially the "combiner" phase of the U. of Arizona
22 Portable Optimizer, but redone to work on our list-structured
23 representation for RTL instead of their string representation.
24
25 The LOG_LINKS of each insn identify the most recent assignment
26 to each REG used in the insn. It is a list of previous insns,
27 each of which contains a SET for a REG that is used in this insn
28 and not used or set in between. LOG_LINKs never cross basic blocks.
29 They were set up by the preceding pass (lifetime analysis).
30
31 We try to combine each pair of insns joined by a logical link.
32 We also try to combine triples of insns A, B and C when
33 C has a link back to B and B has a link back to A.
34
35 LOG_LINKS does not have links for use of the CC0. They don't
36 need to, because the insn that sets the CC0 is always immediately
37 before the insn that tests it. So we always regard a branch
38 insn as having a logical link to the preceding insn. The same is true
39 for an insn explicitly using CC0.
40
41 We check (with use_crosses_set_p) to avoid combining in such a way
42 as to move a computation to a place where its value would be different.
43
44 Combination is done by mathematically substituting the previous
45 insn(s) values for the regs they set into the expressions in
46 the later insns that refer to these regs. If the result is a valid insn
47 for our target machine, according to the machine description,
48 we install it, delete the earlier insns, and update the data flow
49 information (LOG_LINKS and REG_NOTES) for what we did.
50
51 There are a few exceptions where the dataflow information created by
52 flow.c aren't completely updated:
53
54 - reg_live_length is not updated
55 - reg_n_refs is not adjusted in the rare case when a register is
56 no longer required in a computation
57 - there are extremely rare cases (see distribute_regnotes) when a
58 REG_DEAD note is lost
59 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
60 removed because there is no way to know which register it was
61 linking
62
63 To simplify substitution, we combine only when the earlier insn(s)
64 consist of only a single assignment. To simplify updating afterward,
65 we never combine when a subroutine call appears in the middle.
66
67 Since we do not represent assignments to CC0 explicitly except when that
68 is all an insn does, there is no LOG_LINKS entry in an insn that uses
69 the condition code for the insn that set the condition code.
70 Fortunately, these two insns must be consecutive.
71 Therefore, every JUMP_INSN is taken to have an implicit logical link
72 to the preceding insn. This is not quite right, since non-jumps can
73 also use the condition code; but in practice such insns would not
74 combine anyway. */
75
230d793d
RS
76#include "config.h"
77#include "gvarargs.h"
78#include "rtl.h"
79#include "flags.h"
80#include "regs.h"
55310dad 81#include "hard-reg-set.h"
230d793d
RS
82#include "expr.h"
83#include "basic-block.h"
84#include "insn-config.h"
85#include "insn-flags.h"
86#include "insn-codes.h"
87#include "insn-attr.h"
88#include "recog.h"
89#include "real.h"
f8d97cf4 90#include <stdio.h>
230d793d
RS
91
92/* It is not safe to use ordinary gen_lowpart in combine.
93 Use gen_lowpart_for_combine instead. See comments there. */
94#define gen_lowpart dont_use_gen_lowpart_you_dummy
95
c6dc70d6
RK
96/* If byte loads either zero- or sign- extend, define BYTE_LOADS_EXTEND
97 for cases when we don't care which is true. Define LOAD_EXTEND to
98 be ZERO_EXTEND or SIGN_EXTEND, depending on which was defined. */
99
100#ifdef BYTE_LOADS_ZERO_EXTEND
101#define BYTE_LOADS_EXTEND
102#define LOAD_EXTEND ZERO_EXTEND
103#endif
104
500c518b 105#ifdef BYTE_LOADS_SIGN_EXTEND
c6dc70d6
RK
106#define BYTE_LOADS_EXTEND
107#define LOAD_EXTEND SIGN_EXTEND
108#endif
109
230d793d
RS
110/* Number of attempts to combine instructions in this function. */
111
112static int combine_attempts;
113
114/* Number of attempts that got as far as substitution in this function. */
115
116static int combine_merges;
117
118/* Number of instructions combined with added SETs in this function. */
119
120static int combine_extras;
121
122/* Number of instructions combined in this function. */
123
124static int combine_successes;
125
126/* Totals over entire compilation. */
127
128static int total_attempts, total_merges, total_extras, total_successes;
129\f
130/* Vector mapping INSN_UIDs to cuids.
5089e22e 131 The cuids are like uids but increase monotonically always.
230d793d
RS
132 Combine always uses cuids so that it can compare them.
133 But actually renumbering the uids, which we used to do,
134 proves to be a bad idea because it makes it hard to compare
135 the dumps produced by earlier passes with those from later passes. */
136
137static int *uid_cuid;
138
139/* Get the cuid of an insn. */
140
141#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
142
143/* Maximum register number, which is the size of the tables below. */
144
145static int combine_max_regno;
146
147/* Record last point of death of (hard or pseudo) register n. */
148
149static rtx *reg_last_death;
150
151/* Record last point of modification of (hard or pseudo) register n. */
152
153static rtx *reg_last_set;
154
155/* Record the cuid of the last insn that invalidated memory
156 (anything that writes memory, and subroutine calls, but not pushes). */
157
158static int mem_last_set;
159
160/* Record the cuid of the last CALL_INSN
161 so we can tell whether a potential combination crosses any calls. */
162
163static int last_call_cuid;
164
165/* When `subst' is called, this is the insn that is being modified
166 (by combining in a previous insn). The PATTERN of this insn
167 is still the old pattern partially modified and it should not be
168 looked at, but this may be used to examine the successors of the insn
169 to judge whether a simplification is valid. */
170
171static rtx subst_insn;
172
173/* This is the lowest CUID that `subst' is currently dealing with.
174 get_last_value will not return a value if the register was set at or
175 after this CUID. If not for this mechanism, we could get confused if
176 I2 or I1 in try_combine were an insn that used the old value of a register
177 to obtain a new value. In that case, we might erroneously get the
178 new value of the register when we wanted the old one. */
179
180static int subst_low_cuid;
181
182/* This is the value of undobuf.num_undo when we started processing this
183 substitution. This will prevent gen_rtx_combine from re-used a piece
184 from the previous expression. Doing so can produce circular rtl
185 structures. */
186
187static int previous_num_undos;
188\f
189/* The next group of arrays allows the recording of the last value assigned
190 to (hard or pseudo) register n. We use this information to see if a
5089e22e 191 operation being processed is redundant given a prior operation performed
230d793d
RS
192 on the register. For example, an `and' with a constant is redundant if
193 all the zero bits are already known to be turned off.
194
195 We use an approach similar to that used by cse, but change it in the
196 following ways:
197
198 (1) We do not want to reinitialize at each label.
199 (2) It is useful, but not critical, to know the actual value assigned
200 to a register. Often just its form is helpful.
201
202 Therefore, we maintain the following arrays:
203
204 reg_last_set_value the last value assigned
205 reg_last_set_label records the value of label_tick when the
206 register was assigned
207 reg_last_set_table_tick records the value of label_tick when a
208 value using the register is assigned
209 reg_last_set_invalid set to non-zero when it is not valid
210 to use the value of this register in some
211 register's value
212
213 To understand the usage of these tables, it is important to understand
214 the distinction between the value in reg_last_set_value being valid
215 and the register being validly contained in some other expression in the
216 table.
217
218 Entry I in reg_last_set_value is valid if it is non-zero, and either
219 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
220
221 Register I may validly appear in any expression returned for the value
222 of another register if reg_n_sets[i] is 1. It may also appear in the
223 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
224 reg_last_set_invalid[j] is zero.
225
226 If an expression is found in the table containing a register which may
227 not validly appear in an expression, the register is replaced by
228 something that won't match, (clobber (const_int 0)).
229
230 reg_last_set_invalid[i] is set non-zero when register I is being assigned
231 to and reg_last_set_table_tick[i] == label_tick. */
232
233/* Record last value assigned to (hard or pseudo) register n. */
234
235static rtx *reg_last_set_value;
236
237/* Record the value of label_tick when the value for register n is placed in
238 reg_last_set_value[n]. */
239
568356af 240static int *reg_last_set_label;
230d793d
RS
241
242/* Record the value of label_tick when an expression involving register n
243 is placed in reg_last_set_value. */
244
568356af 245static int *reg_last_set_table_tick;
230d793d
RS
246
247/* Set non-zero if references to register n in expressions should not be
248 used. */
249
250static char *reg_last_set_invalid;
251
252/* Incremented for each label. */
253
568356af 254static int label_tick;
230d793d
RS
255
256/* Some registers that are set more than once and used in more than one
257 basic block are nevertheless always set in similar ways. For example,
258 a QImode register may be loaded from memory in two places on a machine
259 where byte loads zero extend.
260
951553af 261 We record in the following array what we know about the nonzero
230d793d
RS
262 bits of a register, specifically which bits are known to be zero.
263
264 If an entry is zero, it means that we don't know anything special. */
265
55310dad 266static unsigned HOST_WIDE_INT *reg_nonzero_bits;
230d793d 267
951553af 268/* Mode used to compute significance in reg_nonzero_bits. It is the largest
5f4f0e22 269 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
230d793d 270
951553af 271static enum machine_mode nonzero_bits_mode;
230d793d 272
d0ab8cd3
RK
273/* Nonzero if we know that a register has some leading bits that are always
274 equal to the sign bit. */
275
276static char *reg_sign_bit_copies;
277
951553af 278/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
1a26b032
RK
279 It is zero while computing them and after combine has completed. This
280 former test prevents propagating values based on previously set values,
281 which can be incorrect if a variable is modified in a loop. */
230d793d 282
951553af 283static int nonzero_sign_valid;
55310dad
RK
284
285/* These arrays are maintained in parallel with reg_last_set_value
286 and are used to store the mode in which the register was last set,
287 the bits that were known to be zero when it was last set, and the
288 number of sign bits copies it was known to have when it was last set. */
289
290static enum machine_mode *reg_last_set_mode;
291static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
292static char *reg_last_set_sign_bit_copies;
230d793d
RS
293\f
294/* Record one modification to rtl structure
295 to be undone by storing old_contents into *where.
296 is_int is 1 if the contents are an int. */
297
298struct undo
299{
230d793d 300 int is_int;
7c046e4e
RK
301 union {rtx rtx; int i;} old_contents;
302 union {rtx *rtx; int *i;} where;
230d793d
RS
303};
304
305/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
306 num_undo says how many are currently recorded.
307
308 storage is nonzero if we must undo the allocation of new storage.
309 The value of storage is what to pass to obfree.
310
311 other_insn is nonzero if we have modified some other insn in the process
312 of working on subst_insn. It must be verified too. */
313
314#define MAX_UNDO 50
315
316struct undobuf
317{
318 int num_undo;
319 char *storage;
320 struct undo undo[MAX_UNDO];
321 rtx other_insn;
322};
323
324static struct undobuf undobuf;
325
cc876596 326/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
230d793d 327 insn. The substitution can be undone by undo_all. If INTO is already
cc876596
RK
328 set to NEWVAL, do not record this change. Because computing NEWVAL might
329 also call SUBST, we have to compute it before we put anything into
330 the undo table. */
230d793d
RS
331
332#define SUBST(INTO, NEWVAL) \
cc876596
RK
333 do { rtx _new = (NEWVAL); \
334 if (undobuf.num_undo < MAX_UNDO) \
230d793d 335 { \
230d793d 336 undobuf.undo[undobuf.num_undo].is_int = 0; \
7c046e4e
RK
337 undobuf.undo[undobuf.num_undo].where.rtx = &INTO; \
338 undobuf.undo[undobuf.num_undo].old_contents.rtx = INTO; \
cc876596 339 INTO = _new; \
7c046e4e 340 if (undobuf.undo[undobuf.num_undo].old_contents.rtx != INTO) \
230d793d
RS
341 undobuf.num_undo++; \
342 } \
343 } while (0)
344
345/* Similar to SUBST, but NEWVAL is an int. INTO will normally be an XINT
346 expression.
347 Note that substitution for the value of a CONST_INT is not safe. */
348
349#define SUBST_INT(INTO, NEWVAL) \
350 do { if (undobuf.num_undo < MAX_UNDO) \
351{ \
7c046e4e
RK
352 undobuf.undo[undobuf.num_undo].is_int = 1; \
353 undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
354 undobuf.undo[undobuf.num_undo].old_contents.i = INTO; \
230d793d 355 INTO = NEWVAL; \
7c046e4e 356 if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
230d793d
RS
357 undobuf.num_undo++; \
358 } \
359 } while (0)
360
361/* Number of times the pseudo being substituted for
362 was found and replaced. */
363
364static int n_occurrences;
365
951553af 366static void set_nonzero_bits_and_sign_copies ();
7988fd36 367static void setup_incoming_promotions ();
230d793d
RS
368static void move_deaths ();
369rtx remove_death ();
370static void record_value_for_reg ();
371static void record_dead_and_set_regs ();
372static int use_crosses_set_p ();
373static rtx try_combine ();
374static rtx *find_split_point ();
375static rtx subst ();
376static void undo_all ();
377static int reg_dead_at_p ();
378static rtx expand_compound_operation ();
379static rtx expand_field_assignment ();
380static rtx make_extraction ();
381static int get_pos_from_mask ();
77fa0940 382static rtx force_to_mode ();
1a26b032 383static rtx known_cond ();
230d793d
RS
384static rtx make_field_assignment ();
385static rtx make_compound_operation ();
386static rtx apply_distributive_law ();
387static rtx simplify_and_const_int ();
951553af 388static unsigned HOST_WIDE_INT nonzero_bits ();
d0ab8cd3 389static int num_sign_bit_copies ();
230d793d
RS
390static int merge_outer_ops ();
391static rtx simplify_shift_const ();
392static int recog_for_combine ();
393static rtx gen_lowpart_for_combine ();
394static rtx gen_rtx_combine ();
395static rtx gen_binary ();
396static rtx gen_unary ();
397static enum rtx_code simplify_comparison ();
398static int reversible_comparison_p ();
399static int get_last_value_validate ();
400static rtx get_last_value ();
401static void distribute_notes ();
402static void distribute_links ();
403\f
404/* Main entry point for combiner. F is the first insn of the function.
405 NREGS is the first unused pseudo-reg number. */
406
407void
408combine_instructions (f, nregs)
409 rtx f;
410 int nregs;
411{
412 register rtx insn, next, prev;
413 register int i;
414 register rtx links, nextlinks;
415
416 combine_attempts = 0;
417 combine_merges = 0;
418 combine_extras = 0;
419 combine_successes = 0;
bef9925b 420 undobuf.num_undo = previous_num_undos = 0;
230d793d
RS
421
422 combine_max_regno = nregs;
423
424 reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
425 reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
426 reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
568356af
RK
427 reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
428 reg_last_set_label = (int *) alloca (nregs * sizeof (int));
5f4f0e22 429 reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
55310dad
RK
430 reg_last_set_mode
431 = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
432 reg_last_set_nonzero_bits
433 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
434 reg_last_set_sign_bit_copies
435 = (char *) alloca (nregs * sizeof (char));
436
437 reg_nonzero_bits
438 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
d0ab8cd3 439 reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));
230d793d
RS
440
441 bzero (reg_last_death, nregs * sizeof (rtx));
442 bzero (reg_last_set, nregs * sizeof (rtx));
443 bzero (reg_last_set_value, nregs * sizeof (rtx));
568356af
RK
444 bzero (reg_last_set_table_tick, nregs * sizeof (int));
445 bzero (reg_last_set_label, nregs * sizeof (int));
230d793d 446 bzero (reg_last_set_invalid, nregs * sizeof (char));
55310dad
RK
447 bzero (reg_last_set_mode, nregs * sizeof (enum machine_mode));
448 bzero (reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
449 bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
951553af 450 bzero (reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
d0ab8cd3 451 bzero (reg_sign_bit_copies, nregs * sizeof (char));
230d793d
RS
452
453 init_recog_no_volatile ();
454
455 /* Compute maximum uid value so uid_cuid can be allocated. */
456
457 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
458 if (INSN_UID (insn) > i)
459 i = INSN_UID (insn);
460
461 uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
462
951553af 463 nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
230d793d 464
951553af 465 /* Don't use reg_nonzero_bits when computing it. This can cause problems
230d793d
RS
466 when, for example, we have j <<= 1 in a loop. */
467
951553af 468 nonzero_sign_valid = 0;
230d793d
RS
469
470 /* Compute the mapping from uids to cuids.
471 Cuids are numbers assigned to insns, like uids,
472 except that cuids increase monotonically through the code.
473
474 Scan all SETs and see if we can deduce anything about what
951553af 475 bits are known to be zero for some registers and how many copies
d79f08e0
RK
476 of the sign bit are known to exist for those registers.
477
478 Also set any known values so that we can use it while searching
479 for what bits are known to be set. */
480
481 label_tick = 1;
230d793d 482
7988fd36
RK
483 setup_incoming_promotions ();
484
230d793d
RS
485 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
486 {
487 INSN_CUID (insn) = ++i;
d79f08e0
RK
488 subst_low_cuid = i;
489 subst_insn = insn;
490
230d793d 491 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
d79f08e0
RK
492 {
493 note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
494 record_dead_and_set_regs (insn);
495 }
496
497 if (GET_CODE (insn) == CODE_LABEL)
498 label_tick++;
230d793d
RS
499 }
500
951553af 501 nonzero_sign_valid = 1;
230d793d
RS
502
503 /* Now scan all the insns in forward order. */
504
505 label_tick = 1;
506 last_call_cuid = 0;
507 mem_last_set = 0;
d79f08e0
RK
508 bzero (reg_last_death, nregs * sizeof (rtx));
509 bzero (reg_last_set, nregs * sizeof (rtx));
510 bzero (reg_last_set_value, nregs * sizeof (rtx));
568356af
RK
511 bzero (reg_last_set_table_tick, nregs * sizeof (int));
512 bzero (reg_last_set_label, nregs * sizeof (int));
d79f08e0 513 bzero (reg_last_set_invalid, nregs * sizeof (char));
230d793d 514
7988fd36
RK
515 setup_incoming_promotions ();
516
230d793d
RS
517 for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
518 {
519 next = 0;
520
521 if (GET_CODE (insn) == CODE_LABEL)
522 label_tick++;
523
524 else if (GET_CODE (insn) == INSN
525 || GET_CODE (insn) == CALL_INSN
526 || GET_CODE (insn) == JUMP_INSN)
527 {
528 /* Try this insn with each insn it links back to. */
529
530 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
5f4f0e22 531 if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
230d793d
RS
532 goto retry;
533
534 /* Try each sequence of three linked insns ending with this one. */
535
536 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
537 for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
538 nextlinks = XEXP (nextlinks, 1))
539 if ((next = try_combine (insn, XEXP (links, 0),
540 XEXP (nextlinks, 0))) != 0)
541 goto retry;
542
543#ifdef HAVE_cc0
544 /* Try to combine a jump insn that uses CC0
545 with a preceding insn that sets CC0, and maybe with its
546 logical predecessor as well.
547 This is how we make decrement-and-branch insns.
548 We need this special code because data flow connections
549 via CC0 do not get entered in LOG_LINKS. */
550
551 if (GET_CODE (insn) == JUMP_INSN
552 && (prev = prev_nonnote_insn (insn)) != 0
553 && GET_CODE (prev) == INSN
554 && sets_cc0_p (PATTERN (prev)))
555 {
5f4f0e22 556 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
230d793d
RS
557 goto retry;
558
559 for (nextlinks = LOG_LINKS (prev); nextlinks;
560 nextlinks = XEXP (nextlinks, 1))
561 if ((next = try_combine (insn, prev,
562 XEXP (nextlinks, 0))) != 0)
563 goto retry;
564 }
565
566 /* Do the same for an insn that explicitly references CC0. */
567 if (GET_CODE (insn) == INSN
568 && (prev = prev_nonnote_insn (insn)) != 0
569 && GET_CODE (prev) == INSN
570 && sets_cc0_p (PATTERN (prev))
571 && GET_CODE (PATTERN (insn)) == SET
572 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
573 {
5f4f0e22 574 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
230d793d
RS
575 goto retry;
576
577 for (nextlinks = LOG_LINKS (prev); nextlinks;
578 nextlinks = XEXP (nextlinks, 1))
579 if ((next = try_combine (insn, prev,
580 XEXP (nextlinks, 0))) != 0)
581 goto retry;
582 }
583
584 /* Finally, see if any of the insns that this insn links to
585 explicitly references CC0. If so, try this insn, that insn,
5089e22e 586 and its predecessor if it sets CC0. */
230d793d
RS
587 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
588 if (GET_CODE (XEXP (links, 0)) == INSN
589 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
590 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
591 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
592 && GET_CODE (prev) == INSN
593 && sets_cc0_p (PATTERN (prev))
594 && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
595 goto retry;
596#endif
597
598 /* Try combining an insn with two different insns whose results it
599 uses. */
600 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
601 for (nextlinks = XEXP (links, 1); nextlinks;
602 nextlinks = XEXP (nextlinks, 1))
603 if ((next = try_combine (insn, XEXP (links, 0),
604 XEXP (nextlinks, 0))) != 0)
605 goto retry;
606
607 if (GET_CODE (insn) != NOTE)
608 record_dead_and_set_regs (insn);
609
610 retry:
611 ;
612 }
613 }
614
615 total_attempts += combine_attempts;
616 total_merges += combine_merges;
617 total_extras += combine_extras;
618 total_successes += combine_successes;
1a26b032 619
951553af 620 nonzero_sign_valid = 0;
230d793d
RS
621}
622\f
7988fd36
RK
623/* Set up any promoted values for incoming argument registers. */
624
ee791cc3 625static void
7988fd36
RK
626setup_incoming_promotions ()
627{
628#ifdef PROMOTE_FUNCTION_ARGS
629 int regno;
630 rtx reg;
631 enum machine_mode mode;
632 int unsignedp;
633 rtx first = get_insns ();
634
635 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
636 if (FUNCTION_ARG_REGNO_P (regno)
637 && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
638 record_value_for_reg (reg, first,
639 gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
500c518b
RK
640 GET_MODE (reg),
641 gen_rtx (CLOBBER, mode, const0_rtx)));
7988fd36
RK
642#endif
643}
644\f
230d793d 645/* Called via note_stores. If X is a pseudo that is used in more than
5f4f0e22 646 one basic block, is narrower that HOST_BITS_PER_WIDE_INT, and is being
951553af 647 set, record what bits are known zero. If we are clobbering X,
230d793d
RS
648 ignore this "set" because the clobbered value won't be used.
649
650 If we are setting only a portion of X and we can't figure out what
651 portion, assume all bits will be used since we don't know what will
d0ab8cd3
RK
652 be happening.
653
654 Similarly, set how many bits of X are known to be copies of the sign bit
655 at all locations in the function. This is the smallest number implied
656 by any set of X. */
230d793d
RS
657
658static void
951553af 659set_nonzero_bits_and_sign_copies (x, set)
230d793d
RS
660 rtx x;
661 rtx set;
662{
d0ab8cd3
RK
663 int num;
664
230d793d
RS
665 if (GET_CODE (x) == REG
666 && REGNO (x) >= FIRST_PSEUDO_REGISTER
667 && reg_n_sets[REGNO (x)] > 1
668 && reg_basic_block[REGNO (x)] < 0
e8095e80
RK
669 /* If this register is undefined at the start of the file, we can't
670 say what its contents were. */
671 && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
672 & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
5f4f0e22 673 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
674 {
675 if (GET_CODE (set) == CLOBBER)
e8095e80
RK
676 {
677 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
678 reg_sign_bit_copies[REGNO (x)] = 0;
679 return;
680 }
230d793d
RS
681
682 /* If this is a complex assignment, see if we can convert it into a
5089e22e 683 simple assignment. */
230d793d 684 set = expand_field_assignment (set);
d79f08e0
RK
685
686 /* If this is a simple assignment, or we have a paradoxical SUBREG,
687 set what we know about X. */
688
689 if (SET_DEST (set) == x
690 || (GET_CODE (SET_DEST (set)) == SUBREG
705c7b3b
JW
691 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
692 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
d79f08e0 693 && SUBREG_REG (SET_DEST (set)) == x))
d0ab8cd3 694 {
951553af
RK
695 reg_nonzero_bits[REGNO (x)]
696 |= nonzero_bits (SET_SRC (set), nonzero_bits_mode);
d0ab8cd3
RK
697 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
698 if (reg_sign_bit_copies[REGNO (x)] == 0
699 || reg_sign_bit_copies[REGNO (x)] > num)
700 reg_sign_bit_copies[REGNO (x)] = num;
701 }
230d793d 702 else
d0ab8cd3 703 {
951553af 704 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
d0ab8cd3
RK
705 reg_sign_bit_copies[REGNO (x)] = 0;
706 }
230d793d
RS
707 }
708}
709\f
710/* See if INSN can be combined into I3. PRED and SUCC are optionally
711 insns that were previously combined into I3 or that will be combined
712 into the merger of INSN and I3.
713
714 Return 0 if the combination is not allowed for any reason.
715
716 If the combination is allowed, *PDEST will be set to the single
717 destination of INSN and *PSRC to the single source, and this function
718 will return 1. */
719
720static int
721can_combine_p (insn, i3, pred, succ, pdest, psrc)
722 rtx insn;
723 rtx i3;
724 rtx pred, succ;
725 rtx *pdest, *psrc;
726{
727 int i;
728 rtx set = 0, src, dest;
729 rtx p, link;
730 int all_adjacent = (succ ? (next_active_insn (insn) == succ
731 && next_active_insn (succ) == i3)
732 : next_active_insn (insn) == i3);
733
734 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
735 or a PARALLEL consisting of such a SET and CLOBBERs.
736
737 If INSN has CLOBBER parallel parts, ignore them for our processing.
738 By definition, these happen during the execution of the insn. When it
739 is merged with another insn, all bets are off. If they are, in fact,
740 needed and aren't also supplied in I3, they may be added by
741 recog_for_combine. Otherwise, it won't match.
742
743 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
744 note.
745
746 Get the source and destination of INSN. If more than one, can't
747 combine. */
748
749 if (GET_CODE (PATTERN (insn)) == SET)
750 set = PATTERN (insn);
751 else if (GET_CODE (PATTERN (insn)) == PARALLEL
752 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
753 {
754 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
755 {
756 rtx elt = XVECEXP (PATTERN (insn), 0, i);
757
758 switch (GET_CODE (elt))
759 {
760 /* We can ignore CLOBBERs. */
761 case CLOBBER:
762 break;
763
764 case SET:
765 /* Ignore SETs whose result isn't used but not those that
766 have side-effects. */
767 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
768 && ! side_effects_p (elt))
769 break;
770
771 /* If we have already found a SET, this is a second one and
772 so we cannot combine with this insn. */
773 if (set)
774 return 0;
775
776 set = elt;
777 break;
778
779 default:
780 /* Anything else means we can't combine. */
781 return 0;
782 }
783 }
784
785 if (set == 0
786 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
787 so don't do anything with it. */
788 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
789 return 0;
790 }
791 else
792 return 0;
793
794 if (set == 0)
795 return 0;
796
797 set = expand_field_assignment (set);
798 src = SET_SRC (set), dest = SET_DEST (set);
799
800 /* Don't eliminate a store in the stack pointer. */
801 if (dest == stack_pointer_rtx
802 /* Don't install a subreg involving two modes not tieable.
803 It can worsen register allocation, and can even make invalid reload
804 insns, since the reg inside may need to be copied from in the
805 outside mode, and that may be invalid if it is an fp reg copied in
5089e22e
RS
806 integer mode. As a special exception, we can allow this if
807 I3 is simply copying DEST, a REG, to CC0. */
230d793d 808 || (GET_CODE (src) == SUBREG
5089e22e
RS
809 && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
810#ifdef HAVE_cc0
811 && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
812 && SET_DEST (PATTERN (i3)) == cc0_rtx
813 && GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
814#endif
815 )
230d793d
RS
816 /* If we couldn't eliminate a field assignment, we can't combine. */
817 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
818 /* Don't combine with an insn that sets a register to itself if it has
819 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
5f4f0e22 820 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
230d793d
RS
821 /* Can't merge a function call. */
822 || GET_CODE (src) == CALL
823 /* Don't substitute into an incremented register. */
824 || FIND_REG_INC_NOTE (i3, dest)
825 || (succ && FIND_REG_INC_NOTE (succ, dest))
826 /* Don't combine the end of a libcall into anything. */
5f4f0e22 827 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
230d793d
RS
828 /* Make sure that DEST is not used after SUCC but before I3. */
829 || (succ && ! all_adjacent
830 && reg_used_between_p (dest, succ, i3))
831 /* Make sure that the value that is to be substituted for the register
832 does not use any registers whose values alter in between. However,
833 If the insns are adjacent, a use can't cross a set even though we
834 think it might (this can happen for a sequence of insns each setting
835 the same destination; reg_last_set of that register might point to
a66a10c7
RS
836 a NOTE). Also, don't move a volatile asm or UNSPEC_VOLATILE across
837 any other insns. */
230d793d
RS
838 || (! all_adjacent
839 && (use_crosses_set_p (src, INSN_CUID (insn))
a66a10c7
RS
840 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
841 || GET_CODE (src) == UNSPEC_VOLATILE))
230d793d
RS
842 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
843 better register allocation by not doing the combine. */
844 || find_reg_note (i3, REG_NO_CONFLICT, dest)
845 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
846 /* Don't combine across a CALL_INSN, because that would possibly
847 change whether the life span of some REGs crosses calls or not,
848 and it is a pain to update that information.
849 Exception: if source is a constant, moving it later can't hurt.
850 Accept that special case, because it helps -fforce-addr a lot. */
851 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
852 return 0;
853
854 /* DEST must either be a REG or CC0. */
855 if (GET_CODE (dest) == REG)
856 {
857 /* If register alignment is being enforced for multi-word items in all
858 cases except for parameters, it is possible to have a register copy
859 insn referencing a hard register that is not allowed to contain the
860 mode being copied and which would not be valid as an operand of most
861 insns. Eliminate this problem by not combining with such an insn.
862
863 Also, on some machines we don't want to extend the life of a hard
864 register. */
865
866 if (GET_CODE (src) == REG
867 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
868 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
869#ifdef SMALL_REGISTER_CLASSES
870 /* Don't extend the life of a hard register. */
871 || REGNO (src) < FIRST_PSEUDO_REGISTER
872#else
873 || (REGNO (src) < FIRST_PSEUDO_REGISTER
874 && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
875#endif
876 ))
877 return 0;
878 }
879 else if (GET_CODE (dest) != CC0)
880 return 0;
881
5f96750d
RS
882 /* Don't substitute for a register intended as a clobberable operand.
883 Similarly, don't substitute an expression containing a register that
884 will be clobbered in I3. */
230d793d
RS
885 if (GET_CODE (PATTERN (i3)) == PARALLEL)
886 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
887 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
5f96750d
RS
888 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
889 src)
890 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
230d793d
RS
891 return 0;
892
893 /* If INSN contains anything volatile, or is an `asm' (whether volatile
894 or not), reject, unless nothing volatile comes between it and I3,
895 with the exception of SUCC. */
896
897 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
898 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
899 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
900 && p != succ && volatile_refs_p (PATTERN (p)))
901 return 0;
902
903 /* If INSN or I2 contains an autoincrement or autodecrement,
904 make sure that register is not used between there and I3,
905 and not already used in I3 either.
906 Also insist that I3 not be a jump; if it were one
907 and the incremented register were spilled, we would lose. */
908
909#ifdef AUTO_INC_DEC
910 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
911 if (REG_NOTE_KIND (link) == REG_INC
912 && (GET_CODE (i3) == JUMP_INSN
913 || reg_used_between_p (XEXP (link, 0), insn, i3)
914 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
915 return 0;
916#endif
917
918#ifdef HAVE_cc0
919 /* Don't combine an insn that follows a CC0-setting insn.
920 An insn that uses CC0 must not be separated from the one that sets it.
921 We do, however, allow I2 to follow a CC0-setting insn if that insn
922 is passed as I1; in that case it will be deleted also.
923 We also allow combining in this case if all the insns are adjacent
924 because that would leave the two CC0 insns adjacent as well.
925 It would be more logical to test whether CC0 occurs inside I1 or I2,
926 but that would be much slower, and this ought to be equivalent. */
927
928 p = prev_nonnote_insn (insn);
929 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
930 && ! all_adjacent)
931 return 0;
932#endif
933
934 /* If we get here, we have passed all the tests and the combination is
935 to be allowed. */
936
937 *pdest = dest;
938 *psrc = src;
939
940 return 1;
941}
942\f
943/* LOC is the location within I3 that contains its pattern or the component
944 of a PARALLEL of the pattern. We validate that it is valid for combining.
945
946 One problem is if I3 modifies its output, as opposed to replacing it
947 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
948 so would produce an insn that is not equivalent to the original insns.
949
950 Consider:
951
952 (set (reg:DI 101) (reg:DI 100))
953 (set (subreg:SI (reg:DI 101) 0) <foo>)
954
955 This is NOT equivalent to:
956
957 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
958 (set (reg:DI 101) (reg:DI 100))])
959
960 Not only does this modify 100 (in which case it might still be valid
961 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
962
963 We can also run into a problem if I2 sets a register that I1
964 uses and I1 gets directly substituted into I3 (not via I2). In that
965 case, we would be getting the wrong value of I2DEST into I3, so we
966 must reject the combination. This case occurs when I2 and I1 both
967 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
968 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
969 of a SET must prevent combination from occurring.
970
971 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
972 if the destination of a SET is a hard register.
973
974 Before doing the above check, we first try to expand a field assignment
975 into a set of logical operations.
976
977 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
978 we place a register that is both set and used within I3. If more than one
979 such register is detected, we fail.
980
981 Return 1 if the combination is valid, zero otherwise. */
982
983static int
984combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
985 rtx i3;
986 rtx *loc;
987 rtx i2dest;
988 rtx i1dest;
989 int i1_not_in_src;
990 rtx *pi3dest_killed;
991{
992 rtx x = *loc;
993
994 if (GET_CODE (x) == SET)
995 {
996 rtx set = expand_field_assignment (x);
997 rtx dest = SET_DEST (set);
998 rtx src = SET_SRC (set);
999 rtx inner_dest = dest, inner_src = src;
1000
1001 SUBST (*loc, set);
1002
1003 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1004 || GET_CODE (inner_dest) == SUBREG
1005 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1006 inner_dest = XEXP (inner_dest, 0);
1007
1008 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1009 was added. */
1010#if 0
1011 while (GET_CODE (inner_src) == STRICT_LOW_PART
1012 || GET_CODE (inner_src) == SUBREG
1013 || GET_CODE (inner_src) == ZERO_EXTRACT)
1014 inner_src = XEXP (inner_src, 0);
1015
1016 /* If it is better that two different modes keep two different pseudos,
1017 avoid combining them. This avoids producing the following pattern
1018 on a 386:
1019 (set (subreg:SI (reg/v:QI 21) 0)
1020 (lshiftrt:SI (reg/v:SI 20)
1021 (const_int 24)))
1022 If that were made, reload could not handle the pair of
1023 reg 20/21, since it would try to get any GENERAL_REGS
1024 but some of them don't handle QImode. */
1025
1026 if (rtx_equal_p (inner_src, i2dest)
1027 && GET_CODE (inner_dest) == REG
1028 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1029 return 0;
1030#endif
1031
1032 /* Check for the case where I3 modifies its output, as
1033 discussed above. */
1034 if ((inner_dest != dest
1035 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1036 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
3f508eca
RK
1037 /* This is the same test done in can_combine_p except that we
1038 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
1039 CALL operation. */
230d793d 1040 || (GET_CODE (inner_dest) == REG
dfbe1b2f 1041 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
3f508eca
RK
1042#ifdef SMALL_REGISTER_CLASSES
1043 && GET_CODE (src) != CALL
1044#else
dfbe1b2f
RK
1045 && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1046 GET_MODE (inner_dest))
230d793d 1047#endif
dfbe1b2f
RK
1048 )
1049
230d793d
RS
1050 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1051 return 0;
1052
1053 /* If DEST is used in I3, it is being killed in this insn,
36a9c2e9
JL
1054 so record that for later.
1055 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1056 STACK_POINTER_REGNUM, since these are always considered to be
1057 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
230d793d 1058 if (pi3dest_killed && GET_CODE (dest) == REG
36a9c2e9
JL
1059 && reg_referenced_p (dest, PATTERN (i3))
1060 && REGNO (dest) != FRAME_POINTER_REGNUM
1061#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1062 && (REGNO (dest) != ARG_POINTER_REGNUM
1063 || ! fixed_regs [REGNO (dest)])
1064#endif
1065 && REGNO (dest) != STACK_POINTER_REGNUM)
230d793d
RS
1066 {
1067 if (*pi3dest_killed)
1068 return 0;
1069
1070 *pi3dest_killed = dest;
1071 }
1072 }
1073
1074 else if (GET_CODE (x) == PARALLEL)
1075 {
1076 int i;
1077
1078 for (i = 0; i < XVECLEN (x, 0); i++)
1079 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1080 i1_not_in_src, pi3dest_killed))
1081 return 0;
1082 }
1083
1084 return 1;
1085}
1086\f
1087/* Try to combine the insns I1 and I2 into I3.
1088 Here I1 and I2 appear earlier than I3.
1089 I1 can be zero; then we combine just I2 into I3.
1090
1091 It we are combining three insns and the resulting insn is not recognized,
1092 try splitting it into two insns. If that happens, I2 and I3 are retained
1093 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1094 are pseudo-deleted.
1095
1096 If we created two insns, return I2; otherwise return I3.
1097 Return 0 if the combination does not work. Then nothing is changed. */
1098
1099static rtx
1100try_combine (i3, i2, i1)
1101 register rtx i3, i2, i1;
1102{
1103 /* New patterns for I3 and I3, respectively. */
1104 rtx newpat, newi2pat = 0;
1105 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1106 int added_sets_1, added_sets_2;
1107 /* Total number of SETs to put into I3. */
1108 int total_sets;
1109 /* Nonzero is I2's body now appears in I3. */
1110 int i2_is_used;
1111 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1112 int insn_code_number, i2_code_number, other_code_number;
1113 /* Contains I3 if the destination of I3 is used in its source, which means
1114 that the old life of I3 is being killed. If that usage is placed into
1115 I2 and not in I3, a REG_DEAD note must be made. */
1116 rtx i3dest_killed = 0;
1117 /* SET_DEST and SET_SRC of I2 and I1. */
1118 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1119 /* PATTERN (I2), or a copy of it in certain cases. */
1120 rtx i2pat;
1121 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
1122 int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1123 int i1_feeds_i3 = 0;
1124 /* Notes that must be added to REG_NOTES in I3 and I2. */
1125 rtx new_i3_notes, new_i2_notes;
1126
1127 int maxreg;
1128 rtx temp;
1129 register rtx link;
1130 int i;
1131
1132 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1133 This can occur when flow deletes an insn that it has merged into an
1134 auto-increment address. We also can't do anything if I3 has a
1135 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1136 libcall. */
1137
1138 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1139 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1140 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
5f4f0e22 1141 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
230d793d
RS
1142 return 0;
1143
1144 combine_attempts++;
1145
1146 undobuf.num_undo = previous_num_undos = 0;
1147 undobuf.other_insn = 0;
1148
1149 /* Save the current high-water-mark so we can free storage if we didn't
1150 accept this combination. */
1151 undobuf.storage = (char *) oballoc (0);
1152
1153 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1154 code below, set I1 to be the earlier of the two insns. */
1155 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1156 temp = i1, i1 = i2, i2 = temp;
1157
1158 /* First check for one important special-case that the code below will
1159 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1160 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1161 we may be able to replace that destination with the destination of I3.
1162 This occurs in the common code where we compute both a quotient and
1163 remainder into a structure, in which case we want to do the computation
1164 directly into the structure to avoid register-register copies.
1165
1166 We make very conservative checks below and only try to handle the
1167 most common cases of this. For example, we only handle the case
1168 where I2 and I3 are adjacent to avoid making difficult register
1169 usage tests. */
1170
1171 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1172 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1173 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1174#ifdef SMALL_REGISTER_CLASSES
1175 && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1176 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
1177#endif
1178 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1179 && GET_CODE (PATTERN (i2)) == PARALLEL
1180 && ! side_effects_p (SET_DEST (PATTERN (i3)))
5089e22e
RS
1181 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1182 below would need to check what is inside (and reg_overlap_mentioned_p
1183 doesn't support those codes anyway). Don't allow those destinations;
1184 the resulting insn isn't likely to be recognized anyway. */
1185 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1186 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
230d793d
RS
1187 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1188 SET_DEST (PATTERN (i3)))
1189 && next_real_insn (i2) == i3)
5089e22e
RS
1190 {
1191 rtx p2 = PATTERN (i2);
1192
1193 /* Make sure that the destination of I3,
1194 which we are going to substitute into one output of I2,
1195 is not used within another output of I2. We must avoid making this:
1196 (parallel [(set (mem (reg 69)) ...)
1197 (set (reg 69) ...)])
1198 which is not well-defined as to order of actions.
1199 (Besides, reload can't handle output reloads for this.)
1200
1201 The problem can also happen if the dest of I3 is a memory ref,
1202 if another dest in I2 is an indirect memory ref. */
1203 for (i = 0; i < XVECLEN (p2, 0); i++)
1204 if (GET_CODE (XVECEXP (p2, 0, i)) == SET
1205 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1206 SET_DEST (XVECEXP (p2, 0, i))))
1207 break;
230d793d 1208
5089e22e
RS
1209 if (i == XVECLEN (p2, 0))
1210 for (i = 0; i < XVECLEN (p2, 0); i++)
1211 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1212 {
1213 combine_merges++;
230d793d 1214
5089e22e
RS
1215 subst_insn = i3;
1216 subst_low_cuid = INSN_CUID (i2);
230d793d 1217
5089e22e
RS
1218 added_sets_2 = 0;
1219 i2dest = SET_SRC (PATTERN (i3));
230d793d 1220
5089e22e
RS
1221 /* Replace the dest in I2 with our dest and make the resulting
1222 insn the new pattern for I3. Then skip to where we
1223 validate the pattern. Everything was set up above. */
1224 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1225 SET_DEST (PATTERN (i3)));
1226
1227 newpat = p2;
1228 goto validate_replacement;
1229 }
1230 }
230d793d
RS
1231
1232#ifndef HAVE_cc0
1233 /* If we have no I1 and I2 looks like:
1234 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1235 (set Y OP)])
1236 make up a dummy I1 that is
1237 (set Y OP)
1238 and change I2 to be
1239 (set (reg:CC X) (compare:CC Y (const_int 0)))
1240
1241 (We can ignore any trailing CLOBBERs.)
1242
1243 This undoes a previous combination and allows us to match a branch-and-
1244 decrement insn. */
1245
1246 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1247 && XVECLEN (PATTERN (i2), 0) >= 2
1248 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1249 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1250 == MODE_CC)
1251 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1252 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1253 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1254 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1255 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1256 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1257 {
1258 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1259 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1260 break;
1261
1262 if (i == 1)
1263 {
1264 /* We make I1 with the same INSN_UID as I2. This gives it
1265 the same INSN_CUID for value tracking. Our fake I1 will
1266 never appear in the insn stream so giving it the same INSN_UID
1267 as I2 will not cause a problem. */
1268
1269 i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1270 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
1271
1272 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1273 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1274 SET_DEST (PATTERN (i1)));
1275 }
1276 }
1277#endif
1278
1279 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1280 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1281 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1282 {
1283 undo_all ();
1284 return 0;
1285 }
1286
1287 /* Record whether I2DEST is used in I2SRC and similarly for the other
1288 cases. Knowing this will help in register status updating below. */
1289 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1290 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1291 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1292
916f14f1 1293 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1294 in I2SRC. */
1295 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1296
1297 /* Ensure that I3's pattern can be the destination of combines. */
1298 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1299 i1 && i2dest_in_i1src && i1_feeds_i3,
1300 &i3dest_killed))
1301 {
1302 undo_all ();
1303 return 0;
1304 }
1305
1306 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1307 We used to do this EXCEPT in one case: I3 has a post-inc in an
1308 output operand. However, that exception can give rise to insns like
1309 mov r3,(r3)+
1310 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1311 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1312
1313#if 0
1314 if (!(GET_CODE (PATTERN (i3)) == SET
1315 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1316 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1317 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1318 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1319 /* It's not the exception. */
1320#endif
1321#ifdef AUTO_INC_DEC
1322 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1323 if (REG_NOTE_KIND (link) == REG_INC
1324 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1325 || (i1 != 0
1326 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1327 {
1328 undo_all ();
1329 return 0;
1330 }
1331#endif
1332
1333 /* See if the SETs in I1 or I2 need to be kept around in the merged
1334 instruction: whenever the value set there is still needed past I3.
1335 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1336
1337 For the SET in I1, we have two cases: If I1 and I2 independently
1338 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1339 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1340 in I1 needs to be kept around unless I1DEST dies or is set in either
1341 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1342 I1DEST. If so, we know I1 feeds into I2. */
1343
1344 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1345
1346 added_sets_1
1347 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1348 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1349
1350 /* If the set in I2 needs to be kept around, we must make a copy of
1351 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
5089e22e 1352 PATTERN (I2), we are only substituting for the original I1DEST, not into
230d793d
RS
1353 an already-substituted copy. This also prevents making self-referential
1354 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1355 I2DEST. */
1356
1357 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1358 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1359 : PATTERN (i2));
1360
1361 if (added_sets_2)
1362 i2pat = copy_rtx (i2pat);
1363
1364 combine_merges++;
1365
1366 /* Substitute in the latest insn for the regs set by the earlier ones. */
1367
1368 maxreg = max_reg_num ();
1369
1370 subst_insn = i3;
230d793d
RS
1371
1372 /* It is possible that the source of I2 or I1 may be performing an
1373 unneeded operation, such as a ZERO_EXTEND of something that is known
1374 to have the high part zero. Handle that case by letting subst look at
1375 the innermost one of them.
1376
1377 Another way to do this would be to have a function that tries to
1378 simplify a single insn instead of merging two or more insns. We don't
1379 do this because of the potential of infinite loops and because
1380 of the potential extra memory required. However, doing it the way
1381 we are is a bit of a kludge and doesn't catch all cases.
1382
1383 But only do this if -fexpensive-optimizations since it slows things down
1384 and doesn't usually win. */
1385
1386 if (flag_expensive_optimizations)
1387 {
1388 /* Pass pc_rtx so no substitutions are done, just simplifications.
1389 The cases that we are interested in here do not involve the few
1390 cases were is_replaced is checked. */
1391 if (i1)
d0ab8cd3
RK
1392 {
1393 subst_low_cuid = INSN_CUID (i1);
1394 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1395 }
230d793d 1396 else
d0ab8cd3
RK
1397 {
1398 subst_low_cuid = INSN_CUID (i2);
1399 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1400 }
230d793d
RS
1401
1402 previous_num_undos = undobuf.num_undo;
1403 }
1404
1405#ifndef HAVE_cc0
1406 /* Many machines that don't use CC0 have insns that can both perform an
1407 arithmetic operation and set the condition code. These operations will
1408 be represented as a PARALLEL with the first element of the vector
1409 being a COMPARE of an arithmetic operation with the constant zero.
1410 The second element of the vector will set some pseudo to the result
1411 of the same arithmetic operation. If we simplify the COMPARE, we won't
1412 match such a pattern and so will generate an extra insn. Here we test
1413 for this case, where both the comparison and the operation result are
1414 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1415 I2SRC. Later we will make the PARALLEL that contains I2. */
1416
1417 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1418 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1419 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1420 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1421 {
1422 rtx *cc_use;
1423 enum machine_mode compare_mode;
1424
1425 newpat = PATTERN (i3);
1426 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1427
1428 i2_is_used = 1;
1429
1430#ifdef EXTRA_CC_MODES
1431 /* See if a COMPARE with the operand we substituted in should be done
1432 with the mode that is currently being used. If not, do the same
1433 processing we do in `subst' for a SET; namely, if the destination
1434 is used only once, try to replace it with a register of the proper
1435 mode and also replace the COMPARE. */
1436 if (undobuf.other_insn == 0
1437 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1438 &undobuf.other_insn))
77fa0940
RK
1439 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1440 i2src, const0_rtx))
230d793d
RS
1441 != GET_MODE (SET_DEST (newpat))))
1442 {
1443 int regno = REGNO (SET_DEST (newpat));
1444 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1445
1446 if (regno < FIRST_PSEUDO_REGISTER
1447 || (reg_n_sets[regno] == 1 && ! added_sets_2
1448 && ! REG_USERVAR_P (SET_DEST (newpat))))
1449 {
1450 if (regno >= FIRST_PSEUDO_REGISTER)
1451 SUBST (regno_reg_rtx[regno], new_dest);
1452
1453 SUBST (SET_DEST (newpat), new_dest);
1454 SUBST (XEXP (*cc_use, 0), new_dest);
1455 SUBST (SET_SRC (newpat),
1456 gen_rtx_combine (COMPARE, compare_mode,
1457 i2src, const0_rtx));
1458 }
1459 else
1460 undobuf.other_insn = 0;
1461 }
1462#endif
1463 }
1464 else
1465#endif
1466 {
1467 n_occurrences = 0; /* `subst' counts here */
1468
1469 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1470 need to make a unique copy of I2SRC each time we substitute it
1471 to avoid self-referential rtl. */
1472
d0ab8cd3 1473 subst_low_cuid = INSN_CUID (i2);
230d793d
RS
1474 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1475 ! i1_feeds_i3 && i1dest_in_i1src);
1476 previous_num_undos = undobuf.num_undo;
1477
1478 /* Record whether i2's body now appears within i3's body. */
1479 i2_is_used = n_occurrences;
1480 }
1481
1482 /* If we already got a failure, don't try to do more. Otherwise,
1483 try to substitute in I1 if we have it. */
1484
1485 if (i1 && GET_CODE (newpat) != CLOBBER)
1486 {
1487 /* Before we can do this substitution, we must redo the test done
1488 above (see detailed comments there) that ensures that I1DEST
1489 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1490
5f4f0e22
CH
1491 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1492 0, NULL_PTR))
230d793d
RS
1493 {
1494 undo_all ();
1495 return 0;
1496 }
1497
1498 n_occurrences = 0;
d0ab8cd3 1499 subst_low_cuid = INSN_CUID (i1);
230d793d
RS
1500 newpat = subst (newpat, i1dest, i1src, 0, 0);
1501 previous_num_undos = undobuf.num_undo;
1502 }
1503
916f14f1
RK
1504 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1505 to count all the ways that I2SRC and I1SRC can be used. */
5f4f0e22 1506 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1507 && i2_is_used + added_sets_2 > 1)
5f4f0e22 1508 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
916f14f1
RK
1509 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1510 > 1))
230d793d
RS
1511 /* Fail if we tried to make a new register (we used to abort, but there's
1512 really no reason to). */
1513 || max_reg_num () != maxreg
1514 /* Fail if we couldn't do something and have a CLOBBER. */
1515 || GET_CODE (newpat) == CLOBBER)
1516 {
1517 undo_all ();
1518 return 0;
1519 }
1520
1521 /* If the actions of the earlier insns must be kept
1522 in addition to substituting them into the latest one,
1523 we must make a new PARALLEL for the latest insn
1524 to hold additional the SETs. */
1525
1526 if (added_sets_1 || added_sets_2)
1527 {
1528 combine_extras++;
1529
1530 if (GET_CODE (newpat) == PARALLEL)
1531 {
1532 rtvec old = XVEC (newpat, 0);
1533 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1534 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1535 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1536 sizeof (old->elem[0]) * old->num_elem);
1537 }
1538 else
1539 {
1540 rtx old = newpat;
1541 total_sets = 1 + added_sets_1 + added_sets_2;
1542 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1543 XVECEXP (newpat, 0, 0) = old;
1544 }
1545
1546 if (added_sets_1)
1547 XVECEXP (newpat, 0, --total_sets)
1548 = (GET_CODE (PATTERN (i1)) == PARALLEL
1549 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1550
1551 if (added_sets_2)
1552 {
1553 /* If there is no I1, use I2's body as is. We used to also not do
1554 the subst call below if I2 was substituted into I3,
1555 but that could lose a simplification. */
1556 if (i1 == 0)
1557 XVECEXP (newpat, 0, --total_sets) = i2pat;
1558 else
1559 /* See comment where i2pat is assigned. */
1560 XVECEXP (newpat, 0, --total_sets)
1561 = subst (i2pat, i1dest, i1src, 0, 0);
1562 }
1563 }
1564
1565 /* We come here when we are replacing a destination in I2 with the
1566 destination of I3. */
1567 validate_replacement:
1568
1569 /* Is the result of combination a valid instruction? */
1570 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1571
1572 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1573 the second SET's destination is a register that is unused. In that case,
1574 we just need the first SET. This can occur when simplifying a divmod
1575 insn. We *must* test for this case here because the code below that
1576 splits two independent SETs doesn't handle this case correctly when it
1577 updates the register status. Also check the case where the first
1578 SET's destination is unused. That would not cause incorrect code, but
1579 does cause an unneeded insn to remain. */
1580
1581 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1582 && XVECLEN (newpat, 0) == 2
1583 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1584 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1585 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1586 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1587 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1588 && asm_noperands (newpat) < 0)
1589 {
1590 newpat = XVECEXP (newpat, 0, 0);
1591 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1592 }
1593
1594 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1595 && XVECLEN (newpat, 0) == 2
1596 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1597 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1598 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1599 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1600 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1601 && asm_noperands (newpat) < 0)
1602 {
1603 newpat = XVECEXP (newpat, 0, 1);
1604 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1605 }
1606
d0ab8cd3
RK
1607 /* See if this is an XOR. If so, perhaps the problem is that the
1608 constant is out of range. Replace it with a complemented XOR with
1609 a complemented constant; it might be in range. */
1610
1611 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1612 && GET_CODE (SET_SRC (newpat)) == XOR
1613 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1614 && ((temp = simplify_unary_operation (NOT,
1615 GET_MODE (SET_SRC (newpat)),
1616 XEXP (SET_SRC (newpat), 1),
1617 GET_MODE (SET_SRC (newpat))))
1618 != 0))
1619 {
1620 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1621 rtx pat
1622 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1623 gen_unary (NOT, i_mode,
1624 gen_binary (XOR, i_mode,
1625 XEXP (SET_SRC (newpat), 0),
1626 temp)));
1627
1628 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1629 if (insn_code_number >= 0)
1630 newpat = pat;
1631 }
1632
230d793d
RS
1633 /* If we were combining three insns and the result is a simple SET
1634 with no ASM_OPERANDS that wasn't recognized, try to split it into two
916f14f1
RK
1635 insns. There are two ways to do this. It can be split using a
1636 machine-specific method (like when you have an addition of a large
1637 constant) or by combine in the function find_split_point. */
1638
230d793d
RS
1639 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1640 && asm_noperands (newpat) < 0)
1641 {
916f14f1 1642 rtx m_split, *split;
42495ca0 1643 rtx ni2dest = i2dest;
916f14f1
RK
1644
1645 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
1646 use I2DEST as a scratch register will help. In the latter case,
1647 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1
RK
1648
1649 m_split = split_insns (newpat, i3);
a70c61d9
JW
1650
1651 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1652 inputs of NEWPAT. */
1653
1654 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1655 possible to try that as a scratch reg. This would require adding
1656 more code to make it work though. */
1657
1658 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
42495ca0
RK
1659 {
1660 /* If I2DEST is a hard register or the only use of a pseudo,
1661 we can change its mode. */
1662 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 1663 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 1664 && GET_CODE (i2dest) == REG
42495ca0
RK
1665 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1666 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1667 && ! REG_USERVAR_P (i2dest))))
1668 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1669 REGNO (i2dest));
1670
1671 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1672 gen_rtvec (2, newpat,
1673 gen_rtx (CLOBBER,
1674 VOIDmode,
1675 ni2dest))),
1676 i3);
1677 }
916f14f1
RK
1678
1679 if (m_split && GET_CODE (m_split) == SEQUENCE
3f508eca
RK
1680 && XVECLEN (m_split, 0) == 2
1681 && (next_real_insn (i2) == i3
1682 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1683 INSN_CUID (i2))))
916f14f1 1684 {
1a26b032 1685 rtx i2set, i3set;
d0ab8cd3 1686 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 1687 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 1688
e4ba89be
RK
1689 i3set = single_set (XVECEXP (m_split, 0, 1));
1690 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 1691
42495ca0
RK
1692 /* In case we changed the mode of I2DEST, replace it in the
1693 pseudo-register table here. We can't do it above in case this
1694 code doesn't get executed and we do a split the other way. */
1695
1696 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1697 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1698
916f14f1 1699 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1a26b032
RK
1700
1701 /* If I2 or I3 has multiple SETs, we won't know how to track
1702 register status, so don't use these insns. */
1703
1704 if (i2_code_number >= 0 && i2set && i3set)
8888fada
RK
1705 insn_code_number = recog_for_combine (&newi3pat, i3,
1706 &new_i3_notes);
c767f54b 1707
d0ab8cd3
RK
1708 if (insn_code_number >= 0)
1709 newpat = newi3pat;
1710
c767f54b 1711 /* It is possible that both insns now set the destination of I3.
22609cbf 1712 If so, we must show an extra use of it. */
c767f54b 1713
1a26b032
RK
1714 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1715 && GET_CODE (SET_DEST (i2set)) == REG
1716 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
22609cbf 1717 reg_n_sets[REGNO (SET_DEST (i2set))]++;
916f14f1 1718 }
230d793d
RS
1719
1720 /* If we can split it and use I2DEST, go ahead and see if that
1721 helps things be recognized. Verify that none of the registers
1722 are set between I2 and I3. */
d0ab8cd3 1723 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d
RS
1724#ifdef HAVE_cc0
1725 && GET_CODE (i2dest) == REG
1726#endif
1727 /* We need I2DEST in the proper mode. If it is a hard register
1728 or the only use of a pseudo, we can change its mode. */
1729 && (GET_MODE (*split) == GET_MODE (i2dest)
1730 || GET_MODE (*split) == VOIDmode
1731 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1732 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1733 && ! REG_USERVAR_P (i2dest)))
1734 && (next_real_insn (i2) == i3
1735 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1736 /* We can't overwrite I2DEST if its value is still used by
1737 NEWPAT. */
1738 && ! reg_referenced_p (i2dest, newpat))
1739 {
1740 rtx newdest = i2dest;
1741
1742 /* Get NEWDEST as a register in the proper mode. We have already
1743 validated that we can do this. */
1744 if (GET_MODE (i2dest) != GET_MODE (*split)
1745 && GET_MODE (*split) != VOIDmode)
1746 {
1747 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1748
1749 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1750 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1751 }
1752
1753 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1754 an ASHIFT. This can occur if it was inside a PLUS and hence
1755 appeared to be a memory address. This is a kludge. */
1756 if (GET_CODE (*split) == MULT
1757 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1758 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1759 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
5f4f0e22 1760 XEXP (*split, 0), GEN_INT (i)));
230d793d
RS
1761
1762#ifdef INSN_SCHEDULING
1763 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1764 be written as a ZERO_EXTEND. */
1765 if (GET_CODE (*split) == SUBREG
1766 && GET_CODE (SUBREG_REG (*split)) == MEM)
1767 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1768 XEXP (*split, 0)));
1769#endif
1770
1771 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1772 SUBST (*split, newdest);
1773 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1774 if (i2_code_number >= 0)
1775 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1776 }
1777 }
1778
1779 /* Check for a case where we loaded from memory in a narrow mode and
1780 then sign extended it, but we need both registers. In that case,
1781 we have a PARALLEL with both loads from the same memory location.
1782 We can split this into a load from memory followed by a register-register
1783 copy. This saves at least one insn, more if register allocation can
1784 eliminate the copy. */
1785
1786 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1787 && GET_CODE (newpat) == PARALLEL
1788 && XVECLEN (newpat, 0) == 2
1789 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1790 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1791 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1792 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1793 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1794 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1795 INSN_CUID (i2))
1796 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1797 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1798 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1799 SET_SRC (XVECEXP (newpat, 0, 1)))
1800 && ! find_reg_note (i3, REG_UNUSED,
1801 SET_DEST (XVECEXP (newpat, 0, 0))))
1802 {
472fbdd1
RK
1803 rtx ni2dest;
1804
230d793d 1805 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 1806 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
1807 newpat = XVECEXP (newpat, 0, 1);
1808 SUBST (SET_SRC (newpat),
472fbdd1 1809 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
230d793d
RS
1810 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1811 if (i2_code_number >= 0)
1812 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
5089e22e
RS
1813
1814 if (insn_code_number >= 0)
1815 {
1816 rtx insn;
1817 rtx link;
1818
1819 /* If we will be able to accept this, we have made a change to the
1820 destination of I3. This can invalidate a LOG_LINKS pointing
1821 to I3. No other part of combine.c makes such a transformation.
1822
1823 The new I3 will have a destination that was previously the
1824 destination of I1 or I2 and which was used in i2 or I3. Call
1825 distribute_links to make a LOG_LINK from the next use of
1826 that destination. */
1827
1828 PATTERN (i3) = newpat;
5f4f0e22 1829 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
5089e22e
RS
1830
1831 /* I3 now uses what used to be its destination and which is
1832 now I2's destination. That means we need a LOG_LINK from
1833 I3 to I2. But we used to have one, so we still will.
1834
1835 However, some later insn might be using I2's dest and have
1836 a LOG_LINK pointing at I3. We must remove this link.
1837 The simplest way to remove the link is to point it at I1,
1838 which we know will be a NOTE. */
1839
1840 for (insn = NEXT_INSN (i3);
1841 insn && GET_CODE (insn) != CODE_LABEL
1842 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1843 insn = NEXT_INSN (insn))
1844 {
1845 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
472fbdd1 1846 && reg_referenced_p (ni2dest, PATTERN (insn)))
5089e22e
RS
1847 {
1848 for (link = LOG_LINKS (insn); link;
1849 link = XEXP (link, 1))
1850 if (XEXP (link, 0) == i3)
1851 XEXP (link, 0) = i1;
1852
1853 break;
1854 }
1855 }
1856 }
230d793d
RS
1857 }
1858
1859 /* Similarly, check for a case where we have a PARALLEL of two independent
1860 SETs but we started with three insns. In this case, we can do the sets
1861 as two separate insns. This case occurs when some SET allows two
1862 other insns to combine, but the destination of that SET is still live. */
1863
1864 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1865 && GET_CODE (newpat) == PARALLEL
1866 && XVECLEN (newpat, 0) == 2
1867 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1868 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1869 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1870 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1871 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1872 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1873 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1874 INSN_CUID (i2))
1875 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1876 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1877 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1878 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1879 XVECEXP (newpat, 0, 0))
1880 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1881 XVECEXP (newpat, 0, 1)))
1882 {
1883 newi2pat = XVECEXP (newpat, 0, 1);
1884 newpat = XVECEXP (newpat, 0, 0);
1885
1886 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1887 if (i2_code_number >= 0)
1888 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1889 }
1890
1891 /* If it still isn't recognized, fail and change things back the way they
1892 were. */
1893 if ((insn_code_number < 0
1894 /* Is the result a reasonable ASM_OPERANDS? */
1895 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1896 {
1897 undo_all ();
1898 return 0;
1899 }
1900
1901 /* If we had to change another insn, make sure it is valid also. */
1902 if (undobuf.other_insn)
1903 {
1904 rtx other_notes = REG_NOTES (undobuf.other_insn);
1905 rtx other_pat = PATTERN (undobuf.other_insn);
1906 rtx new_other_notes;
1907 rtx note, next;
1908
1909 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1910 &new_other_notes);
1911
1912 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1913 {
1914 undo_all ();
1915 return 0;
1916 }
1917
1918 PATTERN (undobuf.other_insn) = other_pat;
1919
1920 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1921 are still valid. Then add any non-duplicate notes added by
1922 recog_for_combine. */
1923 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1924 {
1925 next = XEXP (note, 1);
1926
1927 if (REG_NOTE_KIND (note) == REG_UNUSED
1928 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1a26b032
RK
1929 {
1930 if (GET_CODE (XEXP (note, 0)) == REG)
1931 reg_n_deaths[REGNO (XEXP (note, 0))]--;
1932
1933 remove_note (undobuf.other_insn, note);
1934 }
230d793d
RS
1935 }
1936
1a26b032
RK
1937 for (note = new_other_notes; note; note = XEXP (note, 1))
1938 if (GET_CODE (XEXP (note, 0)) == REG)
1939 reg_n_deaths[REGNO (XEXP (note, 0))]++;
1940
230d793d 1941 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 1942 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
1943 }
1944
1945 /* We now know that we can do this combination. Merge the insns and
1946 update the status of registers and LOG_LINKS. */
1947
1948 {
1949 rtx i3notes, i2notes, i1notes = 0;
1950 rtx i3links, i2links, i1links = 0;
1951 rtx midnotes = 0;
1952 int all_adjacent = (next_real_insn (i2) == i3
1953 && (i1 == 0 || next_real_insn (i1) == i2));
1954 register int regno;
1955 /* Compute which registers we expect to eliminate. */
1956 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1957 ? 0 : i2dest);
1958 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1959
1960 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1961 clear them. */
1962 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1963 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1964 if (i1)
1965 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1966
1967 /* Ensure that we do not have something that should not be shared but
1968 occurs multiple times in the new insns. Check this by first
5089e22e 1969 resetting all the `used' flags and then copying anything is shared. */
230d793d
RS
1970
1971 reset_used_flags (i3notes);
1972 reset_used_flags (i2notes);
1973 reset_used_flags (i1notes);
1974 reset_used_flags (newpat);
1975 reset_used_flags (newi2pat);
1976 if (undobuf.other_insn)
1977 reset_used_flags (PATTERN (undobuf.other_insn));
1978
1979 i3notes = copy_rtx_if_shared (i3notes);
1980 i2notes = copy_rtx_if_shared (i2notes);
1981 i1notes = copy_rtx_if_shared (i1notes);
1982 newpat = copy_rtx_if_shared (newpat);
1983 newi2pat = copy_rtx_if_shared (newi2pat);
1984 if (undobuf.other_insn)
1985 reset_used_flags (PATTERN (undobuf.other_insn));
1986
1987 INSN_CODE (i3) = insn_code_number;
1988 PATTERN (i3) = newpat;
1989 if (undobuf.other_insn)
1990 INSN_CODE (undobuf.other_insn) = other_code_number;
1991
1992 /* We had one special case above where I2 had more than one set and
1993 we replaced a destination of one of those sets with the destination
1994 of I3. In that case, we have to update LOG_LINKS of insns later
1995 in this basic block. Note that this (expensive) case is rare. */
1996
1997 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1998 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1999 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2000 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2001 && ! find_reg_note (i2, REG_UNUSED,
2002 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2003 {
2004 register rtx insn;
2005
2006 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
2007 {
2008 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2009 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
2010 if (XEXP (link, 0) == i2)
2011 XEXP (link, 0) = i3;
2012
2013 if (GET_CODE (insn) == CODE_LABEL
2014 || GET_CODE (insn) == JUMP_INSN)
2015 break;
2016 }
2017 }
2018
2019 LOG_LINKS (i3) = 0;
2020 REG_NOTES (i3) = 0;
2021 LOG_LINKS (i2) = 0;
2022 REG_NOTES (i2) = 0;
2023
2024 if (newi2pat)
2025 {
2026 INSN_CODE (i2) = i2_code_number;
2027 PATTERN (i2) = newi2pat;
2028 }
2029 else
2030 {
2031 PUT_CODE (i2, NOTE);
2032 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2033 NOTE_SOURCE_FILE (i2) = 0;
2034 }
2035
2036 if (i1)
2037 {
2038 LOG_LINKS (i1) = 0;
2039 REG_NOTES (i1) = 0;
2040 PUT_CODE (i1, NOTE);
2041 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2042 NOTE_SOURCE_FILE (i1) = 0;
2043 }
2044
2045 /* Get death notes for everything that is now used in either I3 or
2046 I2 and used to die in a previous insn. */
2047
2048 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2049 if (newi2pat)
2050 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2051
2052 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2053 if (i3notes)
5f4f0e22
CH
2054 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2055 elim_i2, elim_i1);
230d793d 2056 if (i2notes)
5f4f0e22
CH
2057 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2058 elim_i2, elim_i1);
230d793d 2059 if (i1notes)
5f4f0e22
CH
2060 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2061 elim_i2, elim_i1);
230d793d 2062 if (midnotes)
5f4f0e22
CH
2063 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2064 elim_i2, elim_i1);
230d793d
RS
2065
2066 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2067 know these are REG_UNUSED and want them to go to the desired insn,
1a26b032
RK
2068 so we always pass it as i3. We have not counted the notes in
2069 reg_n_deaths yet, so we need to do so now. */
2070
230d793d 2071 if (newi2pat && new_i2_notes)
1a26b032
RK
2072 {
2073 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2074 if (GET_CODE (XEXP (temp, 0)) == REG)
2075 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2076
2077 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2078 }
2079
230d793d 2080 if (new_i3_notes)
1a26b032
RK
2081 {
2082 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2083 if (GET_CODE (XEXP (temp, 0)) == REG)
2084 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2085
2086 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2087 }
230d793d
RS
2088
2089 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1a26b032
RK
2090 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2091 Show an additional death due to the REG_DEAD note we make here. If
2092 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 2093
230d793d 2094 if (i3dest_killed)
1a26b032
RK
2095 {
2096 if (GET_CODE (i3dest_killed) == REG)
2097 reg_n_deaths[REGNO (i3dest_killed)]++;
2098
2099 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2100 NULL_RTX),
2101 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2102 NULL_RTX, NULL_RTX);
2103 }
58c8c593
RK
2104
2105 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2106 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2107 we passed I3 in that case, it might delete I2. */
2108
230d793d 2109 if (i2dest_in_i2src)
58c8c593 2110 {
1a26b032
RK
2111 if (GET_CODE (i2dest) == REG)
2112 reg_n_deaths[REGNO (i2dest)]++;
2113
58c8c593
RK
2114 if (newi2pat && reg_set_p (i2dest, newi2pat))
2115 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2116 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2117 else
2118 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2119 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2120 NULL_RTX, NULL_RTX);
2121 }
2122
230d793d 2123 if (i1dest_in_i1src)
58c8c593 2124 {
1a26b032
RK
2125 if (GET_CODE (i1dest) == REG)
2126 reg_n_deaths[REGNO (i1dest)]++;
2127
58c8c593
RK
2128 if (newi2pat && reg_set_p (i1dest, newi2pat))
2129 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2130 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2131 else
2132 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2133 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2134 NULL_RTX, NULL_RTX);
2135 }
230d793d
RS
2136
2137 distribute_links (i3links);
2138 distribute_links (i2links);
2139 distribute_links (i1links);
2140
2141 if (GET_CODE (i2dest) == REG)
2142 {
d0ab8cd3
RK
2143 rtx link;
2144 rtx i2_insn = 0, i2_val = 0, set;
2145
2146 /* The insn that used to set this register doesn't exist, and
2147 this life of the register may not exist either. See if one of
2148 I3's links points to an insn that sets I2DEST. If it does,
2149 that is now the last known value for I2DEST. If we don't update
2150 this and I2 set the register to a value that depended on its old
230d793d
RS
2151 contents, we will get confused. If this insn is used, thing
2152 will be set correctly in combine_instructions. */
d0ab8cd3
RK
2153
2154 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2155 if ((set = single_set (XEXP (link, 0))) != 0
2156 && rtx_equal_p (i2dest, SET_DEST (set)))
2157 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2158
2159 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
2160
2161 /* If the reg formerly set in I2 died only once and that was in I3,
2162 zero its use count so it won't make `reload' do any work. */
2163 if (! added_sets_2 && newi2pat == 0)
2164 {
2165 regno = REGNO (i2dest);
2166 reg_n_sets[regno]--;
2167 if (reg_n_sets[regno] == 0
5f4f0e22
CH
2168 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2169 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
230d793d
RS
2170 reg_n_refs[regno] = 0;
2171 }
2172 }
2173
2174 if (i1 && GET_CODE (i1dest) == REG)
2175 {
d0ab8cd3
RK
2176 rtx link;
2177 rtx i1_insn = 0, i1_val = 0, set;
2178
2179 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2180 if ((set = single_set (XEXP (link, 0))) != 0
2181 && rtx_equal_p (i1dest, SET_DEST (set)))
2182 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2183
2184 record_value_for_reg (i1dest, i1_insn, i1_val);
2185
230d793d
RS
2186 regno = REGNO (i1dest);
2187 if (! added_sets_1)
2188 {
2189 reg_n_sets[regno]--;
2190 if (reg_n_sets[regno] == 0
5f4f0e22
CH
2191 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2192 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
230d793d
RS
2193 reg_n_refs[regno] = 0;
2194 }
2195 }
2196
951553af 2197 /* Update reg_nonzero_bits et al for any changes that may have been made
22609cbf
RK
2198 to this insn. */
2199
951553af 2200 note_stores (newpat, set_nonzero_bits_and_sign_copies);
22609cbf 2201 if (newi2pat)
951553af 2202 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
22609cbf 2203
230d793d
RS
2204 /* If I3 is now an unconditional jump, ensure that it has a
2205 BARRIER following it since it may have initially been a
381ee8af 2206 conditional jump. It may also be the last nonnote insn. */
230d793d
RS
2207
2208 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
381ee8af
TW
2209 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2210 || GET_CODE (temp) != BARRIER))
230d793d
RS
2211 emit_barrier_after (i3);
2212 }
2213
2214 combine_successes++;
2215
2216 return newi2pat ? i2 : i3;
2217}
2218\f
2219/* Undo all the modifications recorded in undobuf. */
2220
2221static void
2222undo_all ()
2223{
2224 register int i;
2225 if (undobuf.num_undo > MAX_UNDO)
2226 undobuf.num_undo = MAX_UNDO;
2227 for (i = undobuf.num_undo - 1; i >= 0; i--)
7c046e4e
RK
2228 {
2229 if (undobuf.undo[i].is_int)
2230 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2231 else
2232 *undobuf.undo[i].where.rtx = undobuf.undo[i].old_contents.rtx;
2233
2234 }
230d793d
RS
2235
2236 obfree (undobuf.storage);
2237 undobuf.num_undo = 0;
2238}
2239\f
2240/* Find the innermost point within the rtx at LOC, possibly LOC itself,
d0ab8cd3
RK
2241 where we have an arithmetic expression and return that point. LOC will
2242 be inside INSN.
230d793d
RS
2243
2244 try_combine will call this function to see if an insn can be split into
2245 two insns. */
2246
2247static rtx *
d0ab8cd3 2248find_split_point (loc, insn)
230d793d 2249 rtx *loc;
d0ab8cd3 2250 rtx insn;
230d793d
RS
2251{
2252 rtx x = *loc;
2253 enum rtx_code code = GET_CODE (x);
2254 rtx *split;
2255 int len = 0, pos, unsignedp;
2256 rtx inner;
2257
2258 /* First special-case some codes. */
2259 switch (code)
2260 {
2261 case SUBREG:
2262#ifdef INSN_SCHEDULING
2263 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2264 point. */
2265 if (GET_CODE (SUBREG_REG (x)) == MEM)
2266 return loc;
2267#endif
d0ab8cd3 2268 return find_split_point (&SUBREG_REG (x), insn);
230d793d 2269
230d793d 2270 case MEM:
916f14f1 2271#ifdef HAVE_lo_sum
230d793d
RS
2272 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2273 using LO_SUM and HIGH. */
2274 if (GET_CODE (XEXP (x, 0)) == CONST
2275 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2276 {
2277 SUBST (XEXP (x, 0),
2278 gen_rtx_combine (LO_SUM, Pmode,
2279 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2280 XEXP (x, 0)));
2281 return &XEXP (XEXP (x, 0), 0);
2282 }
230d793d
RS
2283#endif
2284
916f14f1
RK
2285 /* If we have a PLUS whose second operand is a constant and the
2286 address is not valid, perhaps will can split it up using
2287 the machine-specific way to split large constants. We use
d0ab8cd3 2288 the first psuedo-reg (one of the virtual regs) as a placeholder;
916f14f1
RK
2289 it will not remain in the result. */
2290 if (GET_CODE (XEXP (x, 0)) == PLUS
2291 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2292 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2293 {
2294 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2295 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2296 subst_insn);
2297
2298 /* This should have produced two insns, each of which sets our
2299 placeholder. If the source of the second is a valid address,
2300 we can make put both sources together and make a split point
2301 in the middle. */
2302
2303 if (seq && XVECLEN (seq, 0) == 2
2304 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2305 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2306 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2307 && ! reg_mentioned_p (reg,
2308 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2309 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2310 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2311 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2312 && memory_address_p (GET_MODE (x),
2313 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2314 {
2315 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2316 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2317
2318 /* Replace the placeholder in SRC2 with SRC1. If we can
2319 find where in SRC2 it was placed, that can become our
2320 split point and we can replace this address with SRC2.
2321 Just try two obvious places. */
2322
2323 src2 = replace_rtx (src2, reg, src1);
2324 split = 0;
2325 if (XEXP (src2, 0) == src1)
2326 split = &XEXP (src2, 0);
2327 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2328 && XEXP (XEXP (src2, 0), 0) == src1)
2329 split = &XEXP (XEXP (src2, 0), 0);
2330
2331 if (split)
2332 {
2333 SUBST (XEXP (x, 0), src2);
2334 return split;
2335 }
2336 }
1a26b032
RK
2337
2338 /* If that didn't work, perhaps the first operand is complex and
2339 needs to be computed separately, so make a split point there.
2340 This will occur on machines that just support REG + CONST
2341 and have a constant moved through some previous computation. */
2342
2343 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2344 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2345 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2346 == 'o')))
2347 return &XEXP (XEXP (x, 0), 0);
916f14f1
RK
2348 }
2349 break;
2350
230d793d
RS
2351 case SET:
2352#ifdef HAVE_cc0
2353 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2354 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2355 we need to put the operand into a register. So split at that
2356 point. */
2357
2358 if (SET_DEST (x) == cc0_rtx
2359 && GET_CODE (SET_SRC (x)) != COMPARE
2360 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2361 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2362 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2363 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2364 return &SET_SRC (x);
2365#endif
2366
2367 /* See if we can split SET_SRC as it stands. */
d0ab8cd3 2368 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2369 if (split && split != &SET_SRC (x))
2370 return split;
2371
2372 /* See if this is a bitfield assignment with everything constant. If
2373 so, this is an IOR of an AND, so split it into that. */
2374 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2375 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
5f4f0e22 2376 <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
2377 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2378 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2379 && GET_CODE (SET_SRC (x)) == CONST_INT
2380 && ((INTVAL (XEXP (SET_DEST (x), 1))
2381 + INTVAL (XEXP (SET_DEST (x), 2)))
2382 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2383 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2384 {
2385 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2386 int len = INTVAL (XEXP (SET_DEST (x), 1));
2387 int src = INTVAL (SET_SRC (x));
2388 rtx dest = XEXP (SET_DEST (x), 0);
2389 enum machine_mode mode = GET_MODE (dest);
5f4f0e22 2390 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
230d793d
RS
2391
2392#if BITS_BIG_ENDIAN
2393 pos = GET_MODE_BITSIZE (mode) - len - pos;
2394#endif
2395
2396 if (src == mask)
2397 SUBST (SET_SRC (x),
5f4f0e22 2398 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
230d793d
RS
2399 else
2400 SUBST (SET_SRC (x),
2401 gen_binary (IOR, mode,
2402 gen_binary (AND, mode, dest,
5f4f0e22
CH
2403 GEN_INT (~ (mask << pos)
2404 & GET_MODE_MASK (mode))),
2405 GEN_INT (src << pos)));
230d793d
RS
2406
2407 SUBST (SET_DEST (x), dest);
2408
d0ab8cd3 2409 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2410 if (split && split != &SET_SRC (x))
2411 return split;
2412 }
2413
2414 /* Otherwise, see if this is an operation that we can split into two.
2415 If so, try to split that. */
2416 code = GET_CODE (SET_SRC (x));
2417
2418 switch (code)
2419 {
d0ab8cd3
RK
2420 case AND:
2421 /* If we are AND'ing with a large constant that is only a single
2422 bit and the result is only being used in a context where we
2423 need to know if it is zero or non-zero, replace it with a bit
2424 extraction. This will avoid the large constant, which might
2425 have taken more than one insn to make. If the constant were
2426 not a valid argument to the AND but took only one insn to make,
2427 this is no worse, but if it took more than one insn, it will
2428 be better. */
2429
2430 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2431 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2432 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2433 && GET_CODE (SET_DEST (x)) == REG
2434 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2435 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2436 && XEXP (*split, 0) == SET_DEST (x)
2437 && XEXP (*split, 1) == const0_rtx)
2438 {
2439 SUBST (SET_SRC (x),
2440 make_extraction (GET_MODE (SET_DEST (x)),
2441 XEXP (SET_SRC (x), 0),
2442 pos, NULL_RTX, 1, 1, 0, 0));
2443 return find_split_point (loc, insn);
2444 }
2445 break;
2446
230d793d
RS
2447 case SIGN_EXTEND:
2448 inner = XEXP (SET_SRC (x), 0);
2449 pos = 0;
2450 len = GET_MODE_BITSIZE (GET_MODE (inner));
2451 unsignedp = 0;
2452 break;
2453
2454 case SIGN_EXTRACT:
2455 case ZERO_EXTRACT:
2456 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2457 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2458 {
2459 inner = XEXP (SET_SRC (x), 0);
2460 len = INTVAL (XEXP (SET_SRC (x), 1));
2461 pos = INTVAL (XEXP (SET_SRC (x), 2));
2462
2463#if BITS_BIG_ENDIAN
2464 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2465#endif
2466 unsignedp = (code == ZERO_EXTRACT);
2467 }
2468 break;
2469 }
2470
2471 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2472 {
2473 enum machine_mode mode = GET_MODE (SET_SRC (x));
2474
d0ab8cd3
RK
2475 /* For unsigned, we have a choice of a shift followed by an
2476 AND or two shifts. Use two shifts for field sizes where the
2477 constant might be too large. We assume here that we can
2478 always at least get 8-bit constants in an AND insn, which is
2479 true for every current RISC. */
2480
2481 if (unsignedp && len <= 8)
230d793d
RS
2482 {
2483 SUBST (SET_SRC (x),
2484 gen_rtx_combine
2485 (AND, mode,
2486 gen_rtx_combine (LSHIFTRT, mode,
2487 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
2488 GEN_INT (pos)),
2489 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
230d793d 2490
d0ab8cd3 2491 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2492 if (split && split != &SET_SRC (x))
2493 return split;
2494 }
2495 else
2496 {
2497 SUBST (SET_SRC (x),
2498 gen_rtx_combine
d0ab8cd3 2499 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
230d793d
RS
2500 gen_rtx_combine (ASHIFT, mode,
2501 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
2502 GEN_INT (GET_MODE_BITSIZE (mode)
2503 - len - pos)),
2504 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
230d793d 2505
d0ab8cd3 2506 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2507 if (split && split != &SET_SRC (x))
2508 return split;
2509 }
2510 }
2511
2512 /* See if this is a simple operation with a constant as the second
2513 operand. It might be that this constant is out of range and hence
2514 could be used as a split point. */
2515 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2516 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2517 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2518 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2519 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2520 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2521 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2522 == 'o'))))
2523 return &XEXP (SET_SRC (x), 1);
2524
2525 /* Finally, see if this is a simple operation with its first operand
2526 not in a register. The operation might require this operand in a
2527 register, so return it as a split point. We can always do this
2528 because if the first operand were another operation, we would have
2529 already found it as a split point. */
2530 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2531 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2532 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2533 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2534 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2535 return &XEXP (SET_SRC (x), 0);
2536
2537 return 0;
2538
2539 case AND:
2540 case IOR:
2541 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2542 it is better to write this as (not (ior A B)) so we can split it.
2543 Similarly for IOR. */
2544 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2545 {
2546 SUBST (*loc,
2547 gen_rtx_combine (NOT, GET_MODE (x),
2548 gen_rtx_combine (code == IOR ? AND : IOR,
2549 GET_MODE (x),
2550 XEXP (XEXP (x, 0), 0),
2551 XEXP (XEXP (x, 1), 0))));
d0ab8cd3 2552 return find_split_point (loc, insn);
230d793d
RS
2553 }
2554
2555 /* Many RISC machines have a large set of logical insns. If the
2556 second operand is a NOT, put it first so we will try to split the
2557 other operand first. */
2558 if (GET_CODE (XEXP (x, 1)) == NOT)
2559 {
2560 rtx tem = XEXP (x, 0);
2561 SUBST (XEXP (x, 0), XEXP (x, 1));
2562 SUBST (XEXP (x, 1), tem);
2563 }
2564 break;
2565 }
2566
2567 /* Otherwise, select our actions depending on our rtx class. */
2568 switch (GET_RTX_CLASS (code))
2569 {
2570 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2571 case '3':
d0ab8cd3 2572 split = find_split_point (&XEXP (x, 2), insn);
230d793d
RS
2573 if (split)
2574 return split;
2575 /* ... fall through ... */
2576 case '2':
2577 case 'c':
2578 case '<':
d0ab8cd3 2579 split = find_split_point (&XEXP (x, 1), insn);
230d793d
RS
2580 if (split)
2581 return split;
2582 /* ... fall through ... */
2583 case '1':
2584 /* Some machines have (and (shift ...) ...) insns. If X is not
2585 an AND, but XEXP (X, 0) is, use it as our split point. */
2586 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2587 return &XEXP (x, 0);
2588
d0ab8cd3 2589 split = find_split_point (&XEXP (x, 0), insn);
230d793d
RS
2590 if (split)
2591 return split;
2592 return loc;
2593 }
2594
2595 /* Otherwise, we don't have a split point. */
2596 return 0;
2597}
2598\f
2599/* Throughout X, replace FROM with TO, and return the result.
2600 The result is TO if X is FROM;
2601 otherwise the result is X, but its contents may have been modified.
2602 If they were modified, a record was made in undobuf so that
2603 undo_all will (among other things) return X to its original state.
2604
2605 If the number of changes necessary is too much to record to undo,
2606 the excess changes are not made, so the result is invalid.
2607 The changes already made can still be undone.
2608 undobuf.num_undo is incremented for such changes, so by testing that
2609 the caller can tell whether the result is valid.
2610
2611 `n_occurrences' is incremented each time FROM is replaced.
2612
2613 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2614
5089e22e 2615 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
230d793d
RS
2616 by copying if `n_occurrences' is non-zero. */
2617
2618static rtx
2619subst (x, from, to, in_dest, unique_copy)
2620 register rtx x, from, to;
2621 int in_dest;
2622 int unique_copy;
2623{
2624 register char *fmt;
2625 register int len, i;
2626 register enum rtx_code code = GET_CODE (x), orig_code = code;
2627 rtx temp;
2628 enum machine_mode mode = GET_MODE (x);
2629 enum machine_mode op0_mode = VOIDmode;
2630 rtx other_insn;
2631 rtx *cc_use;
2632 int n_restarts = 0;
2633
2634/* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2635 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2636 If it is 0, that cannot be done. We can now do this for any MEM
2637 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2638 If not for that, MEM's would very rarely be safe. */
2639
2640/* Reject MODEs bigger than a word, because we might not be able
2641 to reference a two-register group starting with an arbitrary register
2642 (and currently gen_lowpart might crash for a SUBREG). */
2643
2644#define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2645 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2646
2647/* Two expressions are equal if they are identical copies of a shared
2648 RTX or if they are both registers with the same register number
2649 and mode. */
2650
2651#define COMBINE_RTX_EQUAL_P(X,Y) \
2652 ((X) == (Y) \
2653 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2654 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2655
2656 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2657 {
2658 n_occurrences++;
2659 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2660 }
2661
2662 /* If X and FROM are the same register but different modes, they will
2663 not have been seen as equal above. However, flow.c will make a
2664 LOG_LINKS entry for that case. If we do nothing, we will try to
2665 rerecognize our original insn and, when it succeeds, we will
2666 delete the feeding insn, which is incorrect.
2667
2668 So force this insn not to match in this (rare) case. */
2669 if (! in_dest && code == REG && GET_CODE (from) == REG
2670 && REGNO (x) == REGNO (from))
2671 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2672
2673 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2674 of which may contain things that can be combined. */
2675 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2676 return x;
2677
2678 /* It is possible to have a subexpression appear twice in the insn.
2679 Suppose that FROM is a register that appears within TO.
2680 Then, after that subexpression has been scanned once by `subst',
2681 the second time it is scanned, TO may be found. If we were
2682 to scan TO here, we would find FROM within it and create a
2683 self-referent rtl structure which is completely wrong. */
2684 if (COMBINE_RTX_EQUAL_P (x, to))
2685 return to;
2686
2687 len = GET_RTX_LENGTH (code);
2688 fmt = GET_RTX_FORMAT (code);
2689
2690 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2691 set up to skip this common case. All other cases where we want to
2692 suppress replacing something inside a SET_SRC are handled via the
2693 IN_DEST operand. */
2694 if (code == SET
2695 && (GET_CODE (SET_DEST (x)) == REG
2696 || GET_CODE (SET_DEST (x)) == CC0
2697 || GET_CODE (SET_DEST (x)) == PC))
2698 fmt = "ie";
2699
2700 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2701 if (fmt[0] == 'e')
2702 op0_mode = GET_MODE (XEXP (x, 0));
2703
2704 for (i = 0; i < len; i++)
2705 {
2706 if (fmt[i] == 'E')
2707 {
2708 register int j;
2709 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2710 {
2711 register rtx new;
2712 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2713 {
2714 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2715 n_occurrences++;
2716 }
2717 else
2718 {
2719 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2720
2721 /* If this substitution failed, this whole thing fails. */
2722 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2723 return new;
2724 }
2725
2726 SUBST (XVECEXP (x, i, j), new);
2727 }
2728 }
2729 else if (fmt[i] == 'e')
2730 {
2731 register rtx new;
2732
2733 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2734 {
2735 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2736 n_occurrences++;
2737 }
2738 else
2739 /* If we are in a SET_DEST, suppress most cases unless we
2740 have gone inside a MEM, in which case we want to
2741 simplify the address. We assume here that things that
2742 are actually part of the destination have their inner
2743 parts in the first expression. This is true for SUBREG,
2744 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2745 things aside from REG and MEM that should appear in a
2746 SET_DEST. */
2747 new = subst (XEXP (x, i), from, to,
2748 (((in_dest
2749 && (code == SUBREG || code == STRICT_LOW_PART
2750 || code == ZERO_EXTRACT))
2751 || code == SET)
2752 && i == 0), unique_copy);
2753
2754 /* If we found that we will have to reject this combination,
2755 indicate that by returning the CLOBBER ourselves, rather than
2756 an expression containing it. This will speed things up as
2757 well as prevent accidents where two CLOBBERs are considered
2758 to be equal, thus producing an incorrect simplification. */
2759
2760 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2761 return new;
2762
2763 SUBST (XEXP (x, i), new);
2764 }
2765 }
2766
d0ab8cd3
RK
2767 /* We come back to here if we have replaced the expression with one of
2768 a different code and it is likely that further simplification will be
2769 possible. */
2770
2771 restart:
2772
eeb43d32
RK
2773 /* If we have restarted more than 4 times, we are probably looping, so
2774 give up. */
2775 if (++n_restarts > 4)
2776 return x;
2777
2778 /* If we are restarting at all, it means that we no longer know the
2779 original mode of operand 0 (since we have probably changed the
2780 form of X). */
2781
2782 if (n_restarts > 1)
2783 op0_mode = VOIDmode;
2784
d0ab8cd3
RK
2785 code = GET_CODE (x);
2786
230d793d
RS
2787 /* If this is a commutative operation, put a constant last and a complex
2788 expression first. We don't need to do this for comparisons here. */
2789 if (GET_RTX_CLASS (code) == 'c'
2790 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2791 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2792 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2793 || (GET_CODE (XEXP (x, 0)) == SUBREG
2794 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2795 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2796 {
2797 temp = XEXP (x, 0);
2798 SUBST (XEXP (x, 0), XEXP (x, 1));
2799 SUBST (XEXP (x, 1), temp);
2800 }
2801
22609cbf
RK
2802 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2803 sign extension of a PLUS with a constant, reverse the order of the sign
2804 extension and the addition. Note that this not the same as the original
2805 code, but overflow is undefined for signed values. Also note that the
2806 PLUS will have been partially moved "inside" the sign-extension, so that
2807 the first operand of X will really look like:
2808 (ashiftrt (plus (ashift A C4) C5) C4).
2809 We convert this to
2810 (plus (ashiftrt (ashift A C4) C2) C4)
2811 and replace the first operand of X with that expression. Later parts
2812 of this function may simplify the expression further.
2813
2814 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2815 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2816 distributive law to produce (plus (mult (sign_extend X) C1) C3).
2817
2818 We do this to simplify address expressions. */
2819
2820 if ((code == PLUS || code == MINUS || code == MULT)
2821 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2822 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2823 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2824 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2825 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2826 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2827 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2828 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2829 XEXP (XEXP (XEXP (x, 0), 0), 1),
2830 XEXP (XEXP (x, 0), 1))) != 0)
2831 {
2832 rtx new
2833 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2834 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2835 INTVAL (XEXP (XEXP (x, 0), 1)));
2836
2837 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2838 INTVAL (XEXP (XEXP (x, 0), 1)));
2839
2840 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2841 }
2842
d0ab8cd3
RK
2843 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2844 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2845 things. Don't deal with operations that change modes here. */
2846
2847 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2848 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2849 {
58744483
RK
2850 /* Don't do this by using SUBST inside X since we might be messing
2851 up a shared expression. */
2852 rtx cond = XEXP (XEXP (x, 0), 0);
2853 rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2854 XEXP (x, 1)),
1a26b032 2855 pc_rtx, pc_rtx, 0, 0);
58744483
RK
2856 rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2857 XEXP (x, 1)),
1a26b032 2858 pc_rtx, pc_rtx, 0, 0);
58744483
RK
2859
2860
2861 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
d0ab8cd3
RK
2862 goto restart;
2863 }
2864
2865 else if (GET_RTX_CLASS (code) == '1'
2866 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2867 && GET_MODE (XEXP (x, 0)) == mode)
2868 {
58744483
RK
2869 rtx cond = XEXP (XEXP (x, 0), 0);
2870 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
1a26b032 2871 pc_rtx, pc_rtx, 0, 0);
58744483 2872 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
1a26b032 2873 pc_rtx, pc_rtx, 0, 0);
58744483
RK
2874
2875 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
d0ab8cd3
RK
2876 goto restart;
2877 }
2878
230d793d
RS
2879 /* Try to fold this expression in case we have constants that weren't
2880 present before. */
2881 temp = 0;
2882 switch (GET_RTX_CLASS (code))
2883 {
2884 case '1':
2885 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2886 break;
2887 case '<':
2888 temp = simplify_relational_operation (code, op0_mode,
2889 XEXP (x, 0), XEXP (x, 1));
77fa0940
RK
2890#ifdef FLOAT_STORE_FLAG_VALUE
2891 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2892 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2893 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2894#endif
230d793d
RS
2895 break;
2896 case 'c':
2897 case '2':
2898 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2899 break;
2900 case 'b':
2901 case '3':
2902 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2903 XEXP (x, 1), XEXP (x, 2));
2904 break;
2905 }
2906
2907 if (temp)
d0ab8cd3 2908 x = temp, code = GET_CODE (temp);
230d793d 2909
230d793d
RS
2910 /* First see if we can apply the inverse distributive law. */
2911 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2912 {
2913 x = apply_distributive_law (x);
2914 code = GET_CODE (x);
2915 }
2916
2917 /* If CODE is an associative operation not otherwise handled, see if we
2918 can associate some operands. This can win if they are constants or
2919 if they are logically related (i.e. (a & b) & a. */
2920 if ((code == PLUS || code == MINUS
2921 || code == MULT || code == AND || code == IOR || code == XOR
2922 || code == DIV || code == UDIV
2923 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2924 && GET_MODE_CLASS (mode) == MODE_INT)
2925 {
2926 if (GET_CODE (XEXP (x, 0)) == code)
2927 {
2928 rtx other = XEXP (XEXP (x, 0), 0);
2929 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2930 rtx inner_op1 = XEXP (x, 1);
2931 rtx inner;
2932
2933 /* Make sure we pass the constant operand if any as the second
2934 one if this is a commutative operation. */
2935 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2936 {
2937 rtx tem = inner_op0;
2938 inner_op0 = inner_op1;
2939 inner_op1 = tem;
2940 }
2941 inner = simplify_binary_operation (code == MINUS ? PLUS
2942 : code == DIV ? MULT
2943 : code == UDIV ? MULT
2944 : code,
2945 mode, inner_op0, inner_op1);
2946
2947 /* For commutative operations, try the other pair if that one
2948 didn't simplify. */
2949 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2950 {
2951 other = XEXP (XEXP (x, 0), 1);
2952 inner = simplify_binary_operation (code, mode,
2953 XEXP (XEXP (x, 0), 0),
2954 XEXP (x, 1));
2955 }
2956
2957 if (inner)
2958 {
2959 x = gen_binary (code, mode, other, inner);
2960 goto restart;
2961
2962 }
2963 }
2964 }
2965
2966 /* A little bit of algebraic simplification here. */
2967 switch (code)
2968 {
2969 case MEM:
2970 /* Ensure that our address has any ASHIFTs converted to MULT in case
2971 address-recognizing predicates are called later. */
2972 temp = make_compound_operation (XEXP (x, 0), MEM);
2973 SUBST (XEXP (x, 0), temp);
2974 break;
2975
2976 case SUBREG:
2977 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2978 is paradoxical. If we can't do that safely, then it becomes
2979 something nonsensical so that this combination won't take place. */
2980
2981 if (GET_CODE (SUBREG_REG (x)) == MEM
2982 && (GET_MODE_SIZE (mode)
2983 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2984 {
2985 rtx inner = SUBREG_REG (x);
2986 int endian_offset = 0;
2987 /* Don't change the mode of the MEM
2988 if that would change the meaning of the address. */
2989 if (MEM_VOLATILE_P (SUBREG_REG (x))
2990 || mode_dependent_address_p (XEXP (inner, 0)))
2991 return gen_rtx (CLOBBER, mode, const0_rtx);
2992
2993#if BYTES_BIG_ENDIAN
2994 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2995 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2996 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2997 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2998#endif
2999 /* Note if the plus_constant doesn't make a valid address
3000 then this combination won't be accepted. */
3001 x = gen_rtx (MEM, mode,
3002 plus_constant (XEXP (inner, 0),
3003 (SUBREG_WORD (x) * UNITS_PER_WORD
3004 + endian_offset)));
3005 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3006 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3007 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3008 return x;
3009 }
3010
3011 /* If we are in a SET_DEST, these other cases can't apply. */
3012 if (in_dest)
3013 return x;
3014
3015 /* Changing mode twice with SUBREG => just change it once,
3016 or not at all if changing back to starting mode. */
3017 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3018 {
3019 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3020 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3021 return SUBREG_REG (SUBREG_REG (x));
3022
3023 SUBST_INT (SUBREG_WORD (x),
3024 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3025 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3026 }
3027
3028 /* SUBREG of a hard register => just change the register number
3029 and/or mode. If the hard register is not valid in that mode,
26ecfc76
RK
3030 suppress this combination. If the hard register is the stack,
3031 frame, or argument pointer, leave this as a SUBREG. */
230d793d
RS
3032
3033 if (GET_CODE (SUBREG_REG (x)) == REG
26ecfc76
RK
3034 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3035 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3036#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3037 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3038#endif
3039 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
230d793d
RS
3040 {
3041 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3042 mode))
3043 return gen_rtx (REG, mode,
3044 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3045 else
3046 return gen_rtx (CLOBBER, mode, const0_rtx);
3047 }
3048
3049 /* For a constant, try to pick up the part we want. Handle a full
a4bde0b1
RK
3050 word and low-order part. Only do this if we are narrowing
3051 the constant; if it is being widened, we have no idea what
3052 the extra bits will have been set to. */
230d793d
RS
3053
3054 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3055 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
a4bde0b1 3056 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
230d793d
RS
3057 && GET_MODE_CLASS (mode) == MODE_INT)
3058 {
3059 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 3060 0, op0_mode);
230d793d
RS
3061 if (temp)
3062 return temp;
3063 }
3064
a4bde0b1
RK
3065 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3066 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
230d793d
RS
3067 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3068
3069 /* If we are narrowing the object, we need to see if we can simplify
3070 the expression for the object knowing that we only need the
d0ab8cd3
RK
3071 low-order bits. */
3072
230d793d 3073 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
d0ab8cd3
RK
3074 && subreg_lowpart_p (x))
3075 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
3076 NULL_RTX);
230d793d
RS
3077 break;
3078
3079 case NOT:
3080 /* (not (plus X -1)) can become (neg X). */
3081 if (GET_CODE (XEXP (x, 0)) == PLUS
3082 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3083 {
3084 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3085 goto restart;
3086 }
3087
3088 /* Similarly, (not (neg X)) is (plus X -1). */
3089 if (GET_CODE (XEXP (x, 0)) == NEG)
3090 {
3091 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3092 goto restart;
3093 }
3094
d0ab8cd3
RK
3095 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3096 if (GET_CODE (XEXP (x, 0)) == XOR
3097 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3098 && (temp = simplify_unary_operation (NOT, mode,
3099 XEXP (XEXP (x, 0), 1),
3100 mode)) != 0)
3101 {
3102 SUBST (XEXP (XEXP (x, 0), 1), temp);
3103 return XEXP (x, 0);
3104 }
3105
230d793d
RS
3106 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3107 other than 1, but that is not valid. We could do a similar
3108 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3109 but this doesn't seem common enough to bother with. */
3110 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3111 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3112 {
3113 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
3114 XEXP (XEXP (x, 0), 1));
3115 goto restart;
3116 }
3117
3118 if (GET_CODE (XEXP (x, 0)) == SUBREG
3119 && subreg_lowpart_p (XEXP (x, 0))
3120 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3121 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3122 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3123 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3124 {
3125 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3126
3127 x = gen_rtx (ROTATE, inner_mode,
3128 gen_unary (NOT, inner_mode, const1_rtx),
3129 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3130 x = gen_lowpart_for_combine (mode, x);
3131 goto restart;
3132 }
3133
3134#if STORE_FLAG_VALUE == -1
3135 /* (not (comparison foo bar)) can be done by reversing the comparison
3136 code if valid. */
3137 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3138 && reversible_comparison_p (XEXP (x, 0)))
3139 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3140 mode, XEXP (XEXP (x, 0), 0),
3141 XEXP (XEXP (x, 0), 1));
500c518b
RK
3142
3143 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3144 is (lt foo (const_int 0)), so we can perform the above
3145 simplification. */
3146
3147 if (XEXP (x, 1) == const1_rtx
3148 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3149 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3150 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3151 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
3152#endif
3153
3154 /* Apply De Morgan's laws to reduce number of patterns for machines
3155 with negating logical insns (and-not, nand, etc.). If result has
3156 only one NOT, put it first, since that is how the patterns are
3157 coded. */
3158
3159 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3160 {
3161 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3162
3163 if (GET_CODE (in1) == NOT)
3164 in1 = XEXP (in1, 0);
3165 else
3166 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3167
3168 if (GET_CODE (in2) == NOT)
3169 in2 = XEXP (in2, 0);
3170 else if (GET_CODE (in2) == CONST_INT
5f4f0e22
CH
3171 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3172 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
230d793d
RS
3173 else
3174 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3175
3176 if (GET_CODE (in2) == NOT)
3177 {
3178 rtx tem = in2;
3179 in2 = in1; in1 = tem;
3180 }
3181
3182 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3183 mode, in1, in2);
3184 goto restart;
3185 }
3186 break;
3187
3188 case NEG:
3189 /* (neg (plus X 1)) can become (not X). */
3190 if (GET_CODE (XEXP (x, 0)) == PLUS
3191 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3192 {
3193 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3194 goto restart;
3195 }
3196
3197 /* Similarly, (neg (not X)) is (plus X 1). */
3198 if (GET_CODE (XEXP (x, 0)) == NOT)
3199 {
3200 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
3201 goto restart;
3202 }
3203
230d793d
RS
3204 /* (neg (minus X Y)) can become (minus Y X). */
3205 if (GET_CODE (XEXP (x, 0)) == MINUS
3206 && (GET_MODE_CLASS (mode) != MODE_FLOAT
3207 /* x-y != -(y-x) with IEEE floating point. */
3208 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
3209 {
3210 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3211 XEXP (XEXP (x, 0), 0));
3212 goto restart;
3213 }
3214
d0ab8cd3
RK
3215 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3216 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3217 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
d0ab8cd3
RK
3218 {
3219 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3220 goto restart;
3221 }
3222
230d793d
RS
3223 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3224 if we can then eliminate the NEG (e.g.,
3225 if the operand is a constant). */
3226
3227 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3228 {
3229 temp = simplify_unary_operation (NEG, mode,
3230 XEXP (XEXP (x, 0), 0), mode);
3231 if (temp)
3232 {
3233 SUBST (XEXP (XEXP (x, 0), 0), temp);
3234 return XEXP (x, 0);
3235 }
3236 }
3237
3238 temp = expand_compound_operation (XEXP (x, 0));
3239
3240 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3241 replaced by (lshiftrt X C). This will convert
3242 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3243
3244 if (GET_CODE (temp) == ASHIFTRT
3245 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3246 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3247 {
3248 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3249 INTVAL (XEXP (temp, 1)));
3250 goto restart;
3251 }
3252
951553af 3253 /* If X has only a single bit that might be nonzero, say, bit I, convert
230d793d
RS
3254 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3255 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3256 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3257 or a SUBREG of one since we'd be making the expression more
3258 complex if it was just a register. */
3259
3260 if (GET_CODE (temp) != REG
3261 && ! (GET_CODE (temp) == SUBREG
3262 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3263 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
230d793d
RS
3264 {
3265 rtx temp1 = simplify_shift_const
5f4f0e22
CH
3266 (NULL_RTX, ASHIFTRT, mode,
3267 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
230d793d
RS
3268 GET_MODE_BITSIZE (mode) - 1 - i),
3269 GET_MODE_BITSIZE (mode) - 1 - i);
3270
3271 /* If all we did was surround TEMP with the two shifts, we
3272 haven't improved anything, so don't use it. Otherwise,
3273 we are better off with TEMP1. */
3274 if (GET_CODE (temp1) != ASHIFTRT
3275 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3276 || XEXP (XEXP (temp1, 0), 0) != temp)
3277 {
3278 x = temp1;
3279 goto restart;
3280 }
3281 }
3282 break;
3283
3284 case FLOAT_TRUNCATE:
3285 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3286 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3287 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3288 return XEXP (XEXP (x, 0), 0);
3289 break;
3290
3291#ifdef HAVE_cc0
3292 case COMPARE:
3293 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3294 using cc0, in which case we want to leave it as a COMPARE
3295 so we can distinguish it from a register-register-copy. */
3296 if (XEXP (x, 1) == const0_rtx)
3297 return XEXP (x, 0);
3298
3299 /* In IEEE floating point, x-0 is not the same as x. */
3300 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3301 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3302 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3303 return XEXP (x, 0);
3304 break;
3305#endif
3306
3307 case CONST:
3308 /* (const (const X)) can become (const X). Do it this way rather than
3309 returning the inner CONST since CONST can be shared with a
3310 REG_EQUAL note. */
3311 if (GET_CODE (XEXP (x, 0)) == CONST)
3312 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3313 break;
3314
3315#ifdef HAVE_lo_sum
3316 case LO_SUM:
3317 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3318 can add in an offset. find_split_point will split this address up
3319 again if it doesn't match. */
3320 if (GET_CODE (XEXP (x, 0)) == HIGH
3321 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3322 return XEXP (x, 1);
3323 break;
3324#endif
3325
3326 case PLUS:
3327 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3328 outermost. That's because that's the way indexed addresses are
3329 supposed to appear. This code used to check many more cases, but
3330 they are now checked elsewhere. */
3331 if (GET_CODE (XEXP (x, 0)) == PLUS
3332 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3333 return gen_binary (PLUS, mode,
3334 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3335 XEXP (x, 1)),
3336 XEXP (XEXP (x, 0), 1));
3337
3338 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3339 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3340 bit-field and can be replaced by either a sign_extend or a
3341 sign_extract. The `and' may be a zero_extend. */
3342 if (GET_CODE (XEXP (x, 0)) == XOR
3343 && GET_CODE (XEXP (x, 1)) == CONST_INT
3344 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3345 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3346 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5f4f0e22 3347 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
3348 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3349 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3350 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 3351 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
230d793d
RS
3352 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3353 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3354 == i + 1))))
3355 {
3356 x = simplify_shift_const
5f4f0e22
CH
3357 (NULL_RTX, ASHIFTRT, mode,
3358 simplify_shift_const (NULL_RTX, ASHIFT, mode,
230d793d
RS
3359 XEXP (XEXP (XEXP (x, 0), 0), 0),
3360 GET_MODE_BITSIZE (mode) - (i + 1)),
3361 GET_MODE_BITSIZE (mode) - (i + 1));
3362 goto restart;
3363 }
3364
951553af 3365 /* If only the low-order bit of X is possible nonzero, (plus x -1)
230d793d
RS
3366 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3367 the bitsize of the mode - 1. This allows simplification of
3368 "a = (b & 8) == 0;" */
3369 if (XEXP (x, 1) == constm1_rtx
3370 && GET_CODE (XEXP (x, 0)) != REG
3371 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3372 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 3373 && nonzero_bits (XEXP (x, 0), mode) == 1)
230d793d
RS
3374 {
3375 x = simplify_shift_const
5f4f0e22
CH
3376 (NULL_RTX, ASHIFTRT, mode,
3377 simplify_shift_const (NULL_RTX, ASHIFT, mode,
230d793d
RS
3378 gen_rtx_combine (XOR, mode,
3379 XEXP (x, 0), const1_rtx),
3380 GET_MODE_BITSIZE (mode) - 1),
3381 GET_MODE_BITSIZE (mode) - 1);
3382 goto restart;
3383 }
02f4ada4
RK
3384
3385 /* If we are adding two things that have no bits in common, convert
3386 the addition into an IOR. This will often be further simplified,
3387 for example in cases like ((a & 1) + (a & 2)), which can
3388 become a & 3. */
3389
ac49a949 3390 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af
RK
3391 && (nonzero_bits (XEXP (x, 0), mode)
3392 & nonzero_bits (XEXP (x, 1), mode)) == 0)
02f4ada4
RK
3393 {
3394 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3395 goto restart;
3396 }
230d793d
RS
3397 break;
3398
3399 case MINUS:
3400 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3401 (and <foo> (const_int pow2-1)) */
3402 if (GET_CODE (XEXP (x, 1)) == AND
3403 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3404 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3405 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3406 {
5f4f0e22 3407 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
230d793d
RS
3408 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3409 goto restart;
3410 }
3411 break;
3412
3413 case MULT:
3414 /* If we have (mult (plus A B) C), apply the distributive law and then
3415 the inverse distributive law to see if things simplify. This
3416 occurs mostly in addresses, often when unrolling loops. */
3417
3418 if (GET_CODE (XEXP (x, 0)) == PLUS)
3419 {
3420 x = apply_distributive_law
3421 (gen_binary (PLUS, mode,
3422 gen_binary (MULT, mode,
3423 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3424 gen_binary (MULT, mode,
3425 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3426
3427 if (GET_CODE (x) != MULT)
3428 goto restart;
3429 }
3430
3431 /* If this is multiplication by a power of two and its first operand is
3432 a shift, treat the multiply as a shift to allow the shifts to
3433 possibly combine. */
3434 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3435 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3436 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3437 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3438 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3439 || GET_CODE (XEXP (x, 0)) == ROTATE
3440 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3441 {
5f4f0e22 3442 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
230d793d
RS
3443 goto restart;
3444 }
3445
3446 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3447 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3448 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3449 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3450 XEXP (XEXP (x, 0), 1));
3451 break;
3452
3453 case UDIV:
3454 /* If this is a divide by a power of two, treat it as a shift if
3455 its first operand is a shift. */
3456 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3457 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3458 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3459 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3460 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3461 || GET_CODE (XEXP (x, 0)) == ROTATE
3462 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3463 {
5f4f0e22 3464 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
230d793d
RS
3465 goto restart;
3466 }
3467 break;
3468
3469 case EQ: case NE:
3470 case GT: case GTU: case GE: case GEU:
3471 case LT: case LTU: case LE: case LEU:
3472 /* If the first operand is a condition code, we can't do anything
3473 with it. */
3474 if (GET_CODE (XEXP (x, 0)) == COMPARE
3475 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3476#ifdef HAVE_cc0
3477 && XEXP (x, 0) != cc0_rtx
3478#endif
3479 ))
3480 {
3481 rtx op0 = XEXP (x, 0);
3482 rtx op1 = XEXP (x, 1);
3483 enum rtx_code new_code;
3484
3485 if (GET_CODE (op0) == COMPARE)
3486 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3487
3488 /* Simplify our comparison, if possible. */
3489 new_code = simplify_comparison (code, &op0, &op1);
3490
3491#if STORE_FLAG_VALUE == 1
3492 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 3493 if only the low-order bit is possibly nonzero in X (such as when
230d793d 3494 X is a ZERO_EXTRACT of one bit. Similarly, we can convert
818b11b9
RK
3495 EQ to (xor X 1). Remove any ZERO_EXTRACT we made when thinking
3496 this was a comparison. It may now be simpler to use, e.g., an
3497 AND. If a ZERO_EXTRACT is indeed appropriate, it will
3498 be placed back by the call to make_compound_operation in the
3499 SET case. */
3f508eca 3500 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3501 && op1 == const0_rtx
951553af 3502 && nonzero_bits (op0, GET_MODE (op0)) == 1)
818b11b9
RK
3503 return gen_lowpart_for_combine (mode,
3504 expand_compound_operation (op0));
3f508eca 3505 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3506 && op1 == const0_rtx
951553af 3507 && nonzero_bits (op0, GET_MODE (op0)) == 1)
818b11b9
RK
3508 {
3509 op0 = expand_compound_operation (op0);
3510
3511 x = gen_rtx_combine (XOR, mode,
3512 gen_lowpart_for_combine (mode, op0),
3513 const1_rtx);
3514 goto restart;
3515 }
230d793d
RS
3516#endif
3517
3518#if STORE_FLAG_VALUE == -1
3519 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
951553af 3520 to (neg x) if only the low-order bit of X can be nonzero.
230d793d
RS
3521 This converts (ne (zero_extract X 1 Y) 0) to
3522 (sign_extract X 1 Y). */
3f508eca 3523 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3524 && op1 == const0_rtx
951553af 3525 && nonzero_bits (op0, GET_MODE (op0)) == 1)
230d793d 3526 {
818b11b9 3527 op0 = expand_compound_operation (op0);
230d793d
RS
3528 x = gen_rtx_combine (NEG, mode,
3529 gen_lowpart_for_combine (mode, op0));
3530 goto restart;
3531 }
3532#endif
3533
3534 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
951553af
RK
3535 one bit that might be nonzero, we can convert (ne x 0) to
3536 (ashift x c) where C puts the bit in the sign bit. Remove any
3537 AND with STORE_FLAG_VALUE when we are done, since we are only
3538 going to test the sign bit. */
3f508eca 3539 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22
CH
3540 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3541 && (STORE_FLAG_VALUE
3542 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
230d793d
RS
3543 && op1 == const0_rtx
3544 && mode == GET_MODE (op0)
951553af 3545 && (i = exact_log2 (nonzero_bits (op0, GET_MODE (op0)))) >= 0)
230d793d 3546 {
818b11b9
RK
3547 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3548 expand_compound_operation (op0),
230d793d
RS
3549 GET_MODE_BITSIZE (mode) - 1 - i);
3550 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3551 return XEXP (x, 0);
3552 else
3553 return x;
3554 }
3555
3556 /* If the code changed, return a whole new comparison. */
3557 if (new_code != code)
3558 return gen_rtx_combine (new_code, mode, op0, op1);
3559
3560 /* Otherwise, keep this operation, but maybe change its operands.
3561 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3562 SUBST (XEXP (x, 0), op0);
3563 SUBST (XEXP (x, 1), op1);
3564 }
3565 break;
3566
3567 case IF_THEN_ELSE:
1a26b032
RK
3568 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3569 used in it is being compared against certain values. Get the
3570 true and false comparisons and see if that says anything about the
3571 value of each arm. */
d0ab8cd3 3572
1a26b032
RK
3573 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3574 && reversible_comparison_p (XEXP (x, 0))
d0ab8cd3
RK
3575 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3576 {
951553af 3577 HOST_WIDE_INT nzb;
d0ab8cd3 3578 rtx from = XEXP (XEXP (x, 0), 0);
1a26b032
RK
3579 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3580 enum rtx_code false_code = reverse_condition (true_code);
3581 rtx true_val = XEXP (XEXP (x, 0), 1);
3582 rtx false_val = true_val;
3583 rtx true_arm = XEXP (x, 1);
3584 rtx false_arm = XEXP (x, 2);
3585 int swapped = 0;
3586
3587 /* If FALSE_CODE is EQ, swap the codes and arms. */
3588
3589 if (false_code == EQ)
3590 {
3591 swapped = 1, true_code = EQ, false_code = NE;
3592 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3593 }
d0ab8cd3 3594
1a26b032 3595 /* If we are comparing against zero and the expression being tested
951553af
RK
3596 has only a single bit that might be nonzero, that is its value
3597 when it is not equal to zero. Similarly if it is known to be
3598 -1 or 0. */
d0ab8cd3 3599
1a26b032 3600 if (true_code == EQ && true_val == const0_rtx
951553af
RK
3601 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3602 false_code = EQ, false_val = GEN_INT (nzb);
1a26b032 3603 else if (true_code == EQ && true_val == const0_rtx
d0ab8cd3
RK
3604 && (num_sign_bit_copies (from, GET_MODE (from))
3605 == GET_MODE_BITSIZE (GET_MODE (from))))
1a26b032 3606 false_code = EQ, false_val = constm1_rtx;
d0ab8cd3
RK
3607
3608 /* Now simplify an arm if we know the value of the register
3609 in the branch and it is used in the arm. Be carefull due to
3610 the potential of locally-shared RTL. */
3611
1a26b032
RK
3612 if (reg_mentioned_p (from, true_arm))
3613 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3614 from, true_val),
3615 pc_rtx, pc_rtx, 0, 0);
3616 if (reg_mentioned_p (from, false_arm))
3617 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3618 from, false_val),
3619 pc_rtx, pc_rtx, 0, 0);
3620
3621 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3622 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
d0ab8cd3
RK
3623 }
3624
230d793d
RS
3625 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3626 reversed, do so to avoid needing two sets of patterns for
d0ab8cd3 3627 subtract-and-branch insns. Similarly if we have a constant in that
1a26b032
RK
3628 position or if the third operand is the same as the first operand
3629 of the comparison. */
3630
3631 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3632 && reversible_comparison_p (XEXP (x, 0))
3633 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3634 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
230d793d
RS
3635 {
3636 SUBST (XEXP (x, 0),
d0ab8cd3
RK
3637 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3638 GET_MODE (XEXP (x, 0)),
3639 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3640
3641 temp = XEXP (x, 1);
230d793d 3642 SUBST (XEXP (x, 1), XEXP (x, 2));
d0ab8cd3 3643 SUBST (XEXP (x, 2), temp);
230d793d 3644 }
1a26b032
RK
3645
3646 /* If the two arms are identical, we don't need the comparison. */
3647
3648 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3649 && ! side_effects_p (XEXP (x, 0)))
3650 return XEXP (x, 1);
3651
3652 /* Look for cases where we have (abs x) or (neg (abs X)). */
3653
3654 if (GET_MODE_CLASS (mode) == MODE_INT
3655 && GET_CODE (XEXP (x, 2)) == NEG
3656 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3657 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3658 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3659 && ! side_effects_p (XEXP (x, 1)))
3660 switch (GET_CODE (XEXP (x, 0)))
3661 {
3662 case GT:
3663 case GE:
3664 x = gen_unary (ABS, mode, XEXP (x, 1));
3665 goto restart;
3666 case LT:
3667 case LE:
3668 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3669 goto restart;
3670 }
3671
3672 /* Look for MIN or MAX. */
3673
3674 if (GET_MODE_CLASS (mode) == MODE_INT
3675 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3676 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3677 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3678 && ! side_effects_p (XEXP (x, 0)))
3679 switch (GET_CODE (XEXP (x, 0)))
3680 {
3681 case GE:
3682 case GT:
3683 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3684 goto restart;
3685 case LE:
3686 case LT:
3687 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3688 goto restart;
3689 case GEU:
3690 case GTU:
3691 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3692 goto restart;
3693 case LEU:
3694 case LTU:
3695 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3696 goto restart;
3697 }
3698
3699 /* If we have something like (if_then_else (ne A 0) (OP X C) X),
3700 A is known to be either 0 or 1, and OP is an identity when its
3701 second operand is zero, this can be done as (OP X (mult A C)).
3702 Similarly if A is known to be 0 or -1 and also similarly if we have
3703 a ZERO_EXTEND or SIGN_EXTEND as long as X is already extended (so
3704 we don't destroy it). */
3705
3706 if (mode != VOIDmode
3707 && (GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3708 && XEXP (XEXP (x, 0), 1) == const0_rtx
951553af 3709 && (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
1a26b032
RK
3710 || (num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
3711 == GET_MODE_BITSIZE (mode))))
3712 {
3713 rtx nz = make_compound_operation (GET_CODE (XEXP (x, 0)) == NE
3714 ? XEXP (x, 1) : XEXP (x, 2));
3715 rtx z = GET_CODE (XEXP (x, 0)) == NE ? XEXP (x, 2) : XEXP (x, 1);
951553af 3716 rtx dir = (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
1a26b032
RK
3717 ? const1_rtx : constm1_rtx);
3718 rtx c = 0;
3719 enum machine_mode m = mode;
e64ff103 3720 enum rtx_code op, extend_op = 0;
1a26b032
RK
3721
3722 if ((GET_CODE (nz) == PLUS || GET_CODE (nz) == MINUS
3723 || GET_CODE (nz) == IOR || GET_CODE (nz) == XOR
3724 || GET_CODE (nz) == ASHIFT
3725 || GET_CODE (nz) == LSHIFTRT || GET_CODE (nz) == ASHIFTRT)
3726 && rtx_equal_p (XEXP (nz, 0), z))
3727 c = XEXP (nz, 1), op = GET_CODE (nz);
3728 else if (GET_CODE (nz) == SIGN_EXTEND
3729 && (GET_CODE (XEXP (nz, 0)) == PLUS
3730 || GET_CODE (XEXP (nz, 0)) == MINUS
3731 || GET_CODE (XEXP (nz, 0)) == IOR
3732 || GET_CODE (XEXP (nz, 0)) == XOR
3733 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3734 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3735 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3736 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3737 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3738 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3739 && (num_sign_bit_copies (z, GET_MODE (z))
3740 >= (GET_MODE_BITSIZE (mode)
3741 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (nz, 0), 0))))))
3742 {
3743 c = XEXP (XEXP (nz, 0), 1);
3744 op = GET_CODE (XEXP (nz, 0));
3745 extend_op = SIGN_EXTEND;
3746 m = GET_MODE (XEXP (nz, 0));
3747 }
3748 else if (GET_CODE (nz) == ZERO_EXTEND
3749 && (GET_CODE (XEXP (nz, 0)) == PLUS
3750 || GET_CODE (XEXP (nz, 0)) == MINUS
3751 || GET_CODE (XEXP (nz, 0)) == IOR
3752 || GET_CODE (XEXP (nz, 0)) == XOR
3753 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3754 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3755 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3756 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3757 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3758 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3759 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
951553af 3760 && ((nonzero_bits (z, GET_MODE (z))
1a26b032
RK
3761 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (nz, 0), 0))))
3762 == 0))
3763 {
3764 c = XEXP (XEXP (nz, 0), 1);
3765 op = GET_CODE (XEXP (nz, 0));
3766 extend_op = ZERO_EXTEND;
3767 m = GET_MODE (XEXP (nz, 0));
3768 }
3769
3770 if (c && ! side_effects_p (c) && ! side_effects_p (z))
3771 {
3772 temp
3773 = gen_binary (MULT, m,
3774 gen_lowpart_for_combine (m,
3775 XEXP (XEXP (x, 0), 0)),
3776 gen_binary (MULT, m, c, dir));
3777
3778 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
3779
e64ff103 3780 if (extend_op != 0)
1a26b032
RK
3781 temp = gen_unary (extend_op, mode, temp);
3782
3783 return temp;
3784 }
3785 }
230d793d
RS
3786 break;
3787
3788 case ZERO_EXTRACT:
3789 case SIGN_EXTRACT:
3790 case ZERO_EXTEND:
3791 case SIGN_EXTEND:
3792 /* If we are processing SET_DEST, we are done. */
3793 if (in_dest)
3794 return x;
3795
3796 x = expand_compound_operation (x);
3797 if (GET_CODE (x) != code)
3798 goto restart;
3799 break;
3800
3801 case SET:
3802 /* (set (pc) (return)) gets written as (return). */
3803 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3804 return SET_SRC (x);
3805
3806 /* Convert this into a field assignment operation, if possible. */
3807 x = make_field_assignment (x);
3808
230d793d
RS
3809 /* If we are setting CC0 or if the source is a COMPARE, look for the
3810 use of the comparison result and try to simplify it unless we already
3811 have used undobuf.other_insn. */
3812 if ((GET_CODE (SET_SRC (x)) == COMPARE
3813#ifdef HAVE_cc0
3814 || SET_DEST (x) == cc0_rtx
3815#endif
3816 )
3817 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3818 &other_insn)) != 0
3819 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3820 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3821 && XEXP (*cc_use, 0) == SET_DEST (x))
3822 {
3823 enum rtx_code old_code = GET_CODE (*cc_use);
3824 enum rtx_code new_code;
3825 rtx op0, op1;
3826 int other_changed = 0;
3827 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3828
3829 if (GET_CODE (SET_SRC (x)) == COMPARE)
3830 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3831 else
3832 op0 = SET_SRC (x), op1 = const0_rtx;
3833
3834 /* Simplify our comparison, if possible. */
3835 new_code = simplify_comparison (old_code, &op0, &op1);
3836
c141a106 3837#ifdef EXTRA_CC_MODES
230d793d
RS
3838 /* If this machine has CC modes other than CCmode, check to see
3839 if we need to use a different CC mode here. */
77fa0940 3840 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
c141a106 3841#endif /* EXTRA_CC_MODES */
230d793d 3842
c141a106 3843#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
230d793d
RS
3844 /* If the mode changed, we have to change SET_DEST, the mode
3845 in the compare, and the mode in the place SET_DEST is used.
3846 If SET_DEST is a hard register, just build new versions with
3847 the proper mode. If it is a pseudo, we lose unless it is only
3848 time we set the pseudo, in which case we can safely change
3849 its mode. */
3850 if (compare_mode != GET_MODE (SET_DEST (x)))
3851 {
3852 int regno = REGNO (SET_DEST (x));
3853 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3854
3855 if (regno < FIRST_PSEUDO_REGISTER
3856 || (reg_n_sets[regno] == 1
3857 && ! REG_USERVAR_P (SET_DEST (x))))
3858 {
3859 if (regno >= FIRST_PSEUDO_REGISTER)
3860 SUBST (regno_reg_rtx[regno], new_dest);
3861
3862 SUBST (SET_DEST (x), new_dest);
3863 SUBST (XEXP (*cc_use, 0), new_dest);
3864 other_changed = 1;
3865 }
3866 }
3867#endif
3868
3869 /* If the code changed, we have to build a new comparison
3870 in undobuf.other_insn. */
3871 if (new_code != old_code)
3872 {
951553af 3873 unsigned HOST_WIDE_INT mask;
230d793d
RS
3874
3875 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3876 SET_DEST (x), const0_rtx));
3877
3878 /* If the only change we made was to change an EQ into an
951553af 3879 NE or vice versa, OP0 has only one bit that might be nonzero,
230d793d
RS
3880 and OP1 is zero, check if changing the user of the condition
3881 code will produce a valid insn. If it won't, we can keep
3882 the original code in that insn by surrounding our operation
3883 with an XOR. */
3884
3885 if (((old_code == NE && new_code == EQ)
3886 || (old_code == EQ && new_code == NE))
3887 && ! other_changed && op1 == const0_rtx
5f4f0e22
CH
3888 && (GET_MODE_BITSIZE (GET_MODE (op0))
3889 <= HOST_BITS_PER_WIDE_INT)
951553af 3890 && (exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0)))
230d793d
RS
3891 >= 0))
3892 {
3893 rtx pat = PATTERN (other_insn), note = 0;
3894
6e2a4e3c 3895 if ((recog_for_combine (&pat, other_insn, &note) < 0
230d793d
RS
3896 && ! check_asm_operands (pat)))
3897 {
3898 PUT_CODE (*cc_use, old_code);
3899 other_insn = 0;
3900
3901 op0 = gen_binary (XOR, GET_MODE (op0), op0,
5f4f0e22 3902 GEN_INT (mask));
230d793d
RS
3903 }
3904 }
3905
3906 other_changed = 1;
3907 }
3908
3909 if (other_changed)
3910 undobuf.other_insn = other_insn;
3911
3912#ifdef HAVE_cc0
3913 /* If we are now comparing against zero, change our source if
3914 needed. If we do not use cc0, we always have a COMPARE. */
3915 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3916 SUBST (SET_SRC (x), op0);
3917 else
3918#endif
3919
3920 /* Otherwise, if we didn't previously have a COMPARE in the
3921 correct mode, we need one. */
3922 if (GET_CODE (SET_SRC (x)) != COMPARE
3923 || GET_MODE (SET_SRC (x)) != compare_mode)
3924 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3925 op0, op1));
3926 else
3927 {
3928 /* Otherwise, update the COMPARE if needed. */
3929 SUBST (XEXP (SET_SRC (x), 0), op0);
3930 SUBST (XEXP (SET_SRC (x), 1), op1);
3931 }
3932 }
3933 else
3934 {
3935 /* Get SET_SRC in a form where we have placed back any
3936 compound expressions. Then do the checks below. */
3937 temp = make_compound_operation (SET_SRC (x), SET);
3938 SUBST (SET_SRC (x), temp);
3939 }
3940
df62f951
RK
3941 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3942 operation, and X being a REG or (subreg (reg)), we may be able to
3943 convert this to (set (subreg:m2 x) (op)).
3944
3945 We can always do this if M1 is narrower than M2 because that
3946 means that we only care about the low bits of the result.
3947
c6dc70d6
RK
3948 However, on most machines (those with neither BYTE_LOADS_ZERO_EXTEND
3949 nor BYTES_LOADS_SIGN_EXTEND defined), we cannot perform a
457816e2
RK
3950 narrower operation that requested since the high-order bits will
3951 be undefined. On machine where BYTE_LOADS_*_EXTEND is defined,
3952 however, this transformation is safe as long as M1 and M2 have
3953 the same number of words. */
df62f951
RK
3954
3955 if (GET_CODE (SET_SRC (x)) == SUBREG
3956 && subreg_lowpart_p (SET_SRC (x))
3957 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3958 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3959 / UNITS_PER_WORD)
3960 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3961 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
c6dc70d6 3962#ifndef BYTE_LOADS_EXTEND
df62f951
RK
3963 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3964 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3965#endif
3966 && (GET_CODE (SET_DEST (x)) == REG
3967 || (GET_CODE (SET_DEST (x)) == SUBREG
3968 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3969 {
df62f951 3970 SUBST (SET_DEST (x),
d0ab8cd3
RK
3971 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
3972 SET_DEST (x)));
df62f951
RK
3973 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3974 }
3975
c6dc70d6 3976#ifdef BYTE_LOADS_EXTEND
230d793d
RS
3977 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3978 M wider than N, this would require a paradoxical subreg.
3979 Replace the subreg with a zero_extend to avoid the reload that
3980 would otherwise be required. */
c6dc70d6 3981
230d793d
RS
3982 if (GET_CODE (SET_SRC (x)) == SUBREG
3983 && subreg_lowpart_p (SET_SRC (x))
3984 && SUBREG_WORD (SET_SRC (x)) == 0
3985 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3986 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3987 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
c6dc70d6 3988 SUBST (SET_SRC (x), gen_rtx_combine (LOAD_EXTEND,
230d793d
RS
3989 GET_MODE (SET_SRC (x)),
3990 XEXP (SET_SRC (x), 0)));
3991#endif
3992
1a26b032
RK
3993#ifndef HAVE_conditional_move
3994
3995 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
3996 and we are comparing an item known to be 0 or -1 against 0, use a
3997 logical operation instead. Check for one of the arms being an IOR
3998 of the other arm with some value. We compute three terms to be
3999 IOR'ed together. In practice, at most two will be nonzero. Then
4000 we do the IOR's. */
4001
696223d7
TW
4002 if (GET_CODE (SET_DEST (x)) != PC
4003 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
1a26b032
RK
4004 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
4005 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
4006 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
4007 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
4008 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
4009 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
4010 && ! side_effects_p (SET_SRC (x)))
4011 {
4012 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
4013 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
4014 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
4015 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
4016 rtx term1 = const0_rtx, term2, term3;
4017
4018 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4019 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4020 else if (GET_CODE (true) == IOR
4021 && rtx_equal_p (XEXP (true, 1), false))
4022 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4023 else if (GET_CODE (false) == IOR
4024 && rtx_equal_p (XEXP (false, 0), true))
4025 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4026 else if (GET_CODE (false) == IOR
4027 && rtx_equal_p (XEXP (false, 1), true))
4028 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4029
4030 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4031 XEXP (XEXP (SET_SRC (x), 0), 0), true);
4032 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4033 gen_unary (NOT, GET_MODE (SET_SRC (x)),
4034 XEXP (XEXP (SET_SRC (x), 0), 0)),
4035 false);
4036
4037 SUBST (SET_SRC (x),
4038 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4039 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4040 term1, term2),
4041 term3));
4042 }
4043#endif
230d793d
RS
4044 break;
4045
4046 case AND:
4047 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4048 {
4049 x = simplify_and_const_int (x, mode, XEXP (x, 0),
4050 INTVAL (XEXP (x, 1)));
4051
4052 /* If we have (ior (and (X C1) C2)) and the next restart would be
4053 the last, simplify this by making C1 as small as possible
4054 and then exit. */
4055 if (n_restarts >= 3 && GET_CODE (x) == IOR
4056 && GET_CODE (XEXP (x, 0)) == AND
4057 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4058 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4059 {
4060 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
5f4f0e22
CH
4061 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
4062 & ~ INTVAL (XEXP (x, 1))));
230d793d
RS
4063 return gen_binary (IOR, mode, temp, XEXP (x, 1));
4064 }
4065
4066 if (GET_CODE (x) != AND)
4067 goto restart;
4068 }
4069
4070 /* Convert (A | B) & A to A. */
4071 if (GET_CODE (XEXP (x, 0)) == IOR
4072 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4073 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4074 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4075 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4076 return XEXP (x, 1);
4077
4078 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4079 insn (and may simplify more). */
4080 else if (GET_CODE (XEXP (x, 0)) == XOR
4081 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4082 && ! side_effects_p (XEXP (x, 1)))
4083 {
4084 x = gen_binary (AND, mode,
4085 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4086 XEXP (x, 1));
4087 goto restart;
4088 }
4089 else if (GET_CODE (XEXP (x, 0)) == XOR
4090 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4091 && ! side_effects_p (XEXP (x, 1)))
4092 {
4093 x = gen_binary (AND, mode,
4094 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4095 XEXP (x, 1));
4096 goto restart;
4097 }
4098
4099 /* Similarly for (~ (A ^ B)) & A. */
4100 else if (GET_CODE (XEXP (x, 0)) == NOT
4101 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4102 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
4103 && ! side_effects_p (XEXP (x, 1)))
4104 {
4105 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
4106 XEXP (x, 1));
4107 goto restart;
4108 }
4109 else if (GET_CODE (XEXP (x, 0)) == NOT
4110 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4111 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
4112 && ! side_effects_p (XEXP (x, 1)))
4113 {
4114 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
4115 XEXP (x, 1));
4116 goto restart;
4117 }
4118
d0ab8cd3
RK
4119 /* If we have (and A B) with A not an object but that is known to
4120 be -1 or 0, this is equivalent to the expression
4121 (if_then_else (ne A (const_int 0)) B (const_int 0))
4122 We make this conversion because it may allow further
1a26b032
RK
4123 simplifications and then allow use of conditional move insns.
4124 If the machine doesn't have condition moves, code in case SET
4125 will convert the IF_THEN_ELSE back to the logical operation.
4126 We build the IF_THEN_ELSE here in case further simplification
4127 is possible (e.g., we can convert it to ABS). */
d0ab8cd3
RK
4128
4129 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
4130 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4131 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
4132 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4133 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4134 {
4135 rtx op0 = XEXP (x, 0);
4136 rtx op1 = const0_rtx;
4137 enum rtx_code comp_code
4138 = simplify_comparison (NE, &op0, &op1);
4139
4140 x = gen_rtx_combine (IF_THEN_ELSE, mode,
4141 gen_binary (comp_code, VOIDmode, op0, op1),
4142 XEXP (x, 1), const0_rtx);
4143 goto restart;
4144 }
4145
4146 /* In the following group of tests (and those in case IOR below),
230d793d
RS
4147 we start with some combination of logical operations and apply
4148 the distributive law followed by the inverse distributive law.
4149 Most of the time, this results in no change. However, if some of
4150 the operands are the same or inverses of each other, simplifications
4151 will result.
4152
4153 For example, (and (ior A B) (not B)) can occur as the result of
4154 expanding a bit field assignment. When we apply the distributive
4155 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
4156 which then simplifies to (and (A (not B))). */
4157
4158 /* If we have (and (ior A B) C), apply the distributive law and then
4159 the inverse distributive law to see if things simplify. */
4160
4161 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4162 {
4163 x = apply_distributive_law
4164 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4165 gen_binary (AND, mode,
4166 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4167 gen_binary (AND, mode,
4168 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4169 if (GET_CODE (x) != AND)
4170 goto restart;
4171 }
4172
4173 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4174 {
4175 x = apply_distributive_law
4176 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4177 gen_binary (AND, mode,
4178 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4179 gen_binary (AND, mode,
4180 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4181 if (GET_CODE (x) != AND)
4182 goto restart;
4183 }
4184
4185 /* Similarly, taking advantage of the fact that
4186 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4187
4188 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4189 {
4190 x = apply_distributive_law
4191 (gen_binary (XOR, mode,
4192 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4193 XEXP (XEXP (x, 1), 0)),
4194 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4195 XEXP (XEXP (x, 1), 1))));
4196 if (GET_CODE (x) != AND)
4197 goto restart;
4198 }
4199
4200 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4201 {
4202 x = apply_distributive_law
4203 (gen_binary (XOR, mode,
4204 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4205 XEXP (XEXP (x, 0), 0)),
4206 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4207 XEXP (XEXP (x, 0), 1))));
4208 if (GET_CODE (x) != AND)
4209 goto restart;
4210 }
4211 break;
4212
4213 case IOR:
951553af 4214 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
d0ab8cd3 4215 if (GET_CODE (XEXP (x, 1)) == CONST_INT
ac49a949 4216 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af 4217 && (nonzero_bits (XEXP (x, 0), mode) & ~ INTVAL (XEXP (x, 1))) == 0)
d0ab8cd3
RK
4218 return XEXP (x, 1);
4219
230d793d
RS
4220 /* Convert (A & B) | A to A. */
4221 if (GET_CODE (XEXP (x, 0)) == AND
4222 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4223 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4224 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4225 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4226 return XEXP (x, 1);
4227
4228 /* If we have (ior (and A B) C), apply the distributive law and then
4229 the inverse distributive law to see if things simplify. */
4230
4231 if (GET_CODE (XEXP (x, 0)) == AND)
4232 {
4233 x = apply_distributive_law
4234 (gen_binary (AND, mode,
4235 gen_binary (IOR, mode,
4236 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4237 gen_binary (IOR, mode,
4238 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4239
4240 if (GET_CODE (x) != IOR)
4241 goto restart;
4242 }
4243
4244 if (GET_CODE (XEXP (x, 1)) == AND)
4245 {
4246 x = apply_distributive_law
4247 (gen_binary (AND, mode,
4248 gen_binary (IOR, mode,
4249 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4250 gen_binary (IOR, mode,
4251 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4252
4253 if (GET_CODE (x) != IOR)
4254 goto restart;
4255 }
4256
4257 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4258 mode size to (rotate A CX). */
4259
4260 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4261 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4262 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4263 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4264 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4265 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4266 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4267 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4268 == GET_MODE_BITSIZE (mode)))
4269 {
4270 rtx shift_count;
4271
4272 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4273 shift_count = XEXP (XEXP (x, 0), 1);
4274 else
4275 shift_count = XEXP (XEXP (x, 1), 1);
4276 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4277 goto restart;
4278 }
4279 break;
4280
4281 case XOR:
4282 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4283 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4284 (NOT y). */
4285 {
4286 int num_negated = 0;
4287 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4288
4289 if (GET_CODE (in1) == NOT)
4290 num_negated++, in1 = XEXP (in1, 0);
4291 if (GET_CODE (in2) == NOT)
4292 num_negated++, in2 = XEXP (in2, 0);
4293
4294 if (num_negated == 2)
4295 {
4296 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4297 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4298 }
4299 else if (num_negated == 1)
d0ab8cd3
RK
4300 {
4301 x = gen_unary (NOT, mode,
4302 gen_binary (XOR, mode, in1, in2));
4303 goto restart;
4304 }
230d793d
RS
4305 }
4306
4307 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4308 correspond to a machine insn or result in further simplifications
4309 if B is a constant. */
4310
4311 if (GET_CODE (XEXP (x, 0)) == AND
4312 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4313 && ! side_effects_p (XEXP (x, 1)))
4314 {
4315 x = gen_binary (AND, mode,
4316 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4317 XEXP (x, 1));
4318 goto restart;
4319 }
4320 else if (GET_CODE (XEXP (x, 0)) == AND
4321 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4322 && ! side_effects_p (XEXP (x, 1)))
4323 {
4324 x = gen_binary (AND, mode,
4325 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4326 XEXP (x, 1));
4327 goto restart;
4328 }
4329
4330
4331#if STORE_FLAG_VALUE == 1
4332 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4333 comparison. */
4334 if (XEXP (x, 1) == const1_rtx
4335 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4336 && reversible_comparison_p (XEXP (x, 0)))
4337 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4338 mode, XEXP (XEXP (x, 0), 0),
4339 XEXP (XEXP (x, 0), 1));
500c518b
RK
4340
4341 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4342 is (lt foo (const_int 0)), so we can perform the above
4343 simplification. */
4344
4345 if (XEXP (x, 1) == const1_rtx
4346 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4347 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4348 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
4349 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
4350#endif
4351
4352 /* (xor (comparison foo bar) (const_int sign-bit))
4353 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22
CH
4354 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4355 && (STORE_FLAG_VALUE
4356 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
230d793d
RS
4357 && XEXP (x, 1) == const_true_rtx
4358 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4359 && reversible_comparison_p (XEXP (x, 0)))
4360 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4361 mode, XEXP (XEXP (x, 0), 0),
4362 XEXP (XEXP (x, 0), 1));
4363 break;
4364
4365 case ABS:
4366 /* (abs (neg <foo>)) -> (abs <foo>) */
4367 if (GET_CODE (XEXP (x, 0)) == NEG)
4368 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4369
4370 /* If operand is something known to be positive, ignore the ABS. */
4371 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
5f4f0e22
CH
4372 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4373 <= HOST_BITS_PER_WIDE_INT)
951553af 4374 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5f4f0e22
CH
4375 & ((HOST_WIDE_INT) 1
4376 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
230d793d
RS
4377 == 0)))
4378 return XEXP (x, 0);
4379
4380
4381 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
d0ab8cd3 4382 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
230d793d
RS
4383 {
4384 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4385 goto restart;
4386 }
4387 break;
4388
a7c99304
RK
4389 case FFS:
4390 /* (ffs (*_extend <X>)) = (ffs <X>) */
4391 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4392 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4393 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4394 break;
4395
230d793d
RS
4396 case FLOAT:
4397 /* (float (sign_extend <X>)) = (float <X>). */
4398 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4399 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4400 break;
4401
4402 case LSHIFT:
4403 case ASHIFT:
4404 case LSHIFTRT:
4405 case ASHIFTRT:
4406 case ROTATE:
4407 case ROTATERT:
230d793d
RS
4408 /* If this is a shift by a constant amount, simplify it. */
4409 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4410 {
4411 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4412 INTVAL (XEXP (x, 1)));
4413 if (GET_CODE (x) != code)
4414 goto restart;
4415 }
77fa0940
RK
4416
4417#ifdef SHIFT_COUNT_TRUNCATED
4418 else if (GET_CODE (XEXP (x, 1)) != REG)
4419 SUBST (XEXP (x, 1),
4420 force_to_mode (XEXP (x, 1), GET_MODE (x),
4421 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
5f4f0e22 4422 NULL_RTX));
77fa0940
RK
4423#endif
4424
230d793d
RS
4425 break;
4426 }
4427
4428 return x;
4429}
4430\f
4431/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4432 operations" because they can be replaced with two more basic operations.
4433 ZERO_EXTEND is also considered "compound" because it can be replaced with
4434 an AND operation, which is simpler, though only one operation.
4435
4436 The function expand_compound_operation is called with an rtx expression
4437 and will convert it to the appropriate shifts and AND operations,
4438 simplifying at each stage.
4439
4440 The function make_compound_operation is called to convert an expression
4441 consisting of shifts and ANDs into the equivalent compound expression.
4442 It is the inverse of this function, loosely speaking. */
4443
4444static rtx
4445expand_compound_operation (x)
4446 rtx x;
4447{
4448 int pos = 0, len;
4449 int unsignedp = 0;
4450 int modewidth;
4451 rtx tem;
4452
4453 switch (GET_CODE (x))
4454 {
4455 case ZERO_EXTEND:
4456 unsignedp = 1;
4457 case SIGN_EXTEND:
75473182
RS
4458 /* We can't necessarily use a const_int for a multiword mode;
4459 it depends on implicitly extending the value.
4460 Since we don't know the right way to extend it,
4461 we can't tell whether the implicit way is right.
4462
4463 Even for a mode that is no wider than a const_int,
4464 we can't win, because we need to sign extend one of its bits through
4465 the rest of it, and we don't know which bit. */
230d793d 4466 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 4467 return x;
230d793d
RS
4468
4469 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4470 return x;
4471
4472 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4473 /* If the inner object has VOIDmode (the only way this can happen
4474 is if it is a ASM_OPERANDS), we can't do anything since we don't
4475 know how much masking to do. */
4476 if (len == 0)
4477 return x;
4478
4479 break;
4480
4481 case ZERO_EXTRACT:
4482 unsignedp = 1;
4483 case SIGN_EXTRACT:
4484 /* If the operand is a CLOBBER, just return it. */
4485 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4486 return XEXP (x, 0);
4487
4488 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4489 || GET_CODE (XEXP (x, 2)) != CONST_INT
4490 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4491 return x;
4492
4493 len = INTVAL (XEXP (x, 1));
4494 pos = INTVAL (XEXP (x, 2));
4495
4496 /* If this goes outside the object being extracted, replace the object
4497 with a (use (mem ...)) construct that only combine understands
4498 and is used only for this purpose. */
4499 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4500 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4501
4502#if BITS_BIG_ENDIAN
4503 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4504#endif
4505 break;
4506
4507 default:
4508 return x;
4509 }
4510
4511 /* If we reach here, we want to return a pair of shifts. The inner
4512 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4513 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4514 logical depending on the value of UNSIGNEDP.
4515
4516 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4517 converted into an AND of a shift.
4518
4519 We must check for the case where the left shift would have a negative
4520 count. This can happen in a case like (x >> 31) & 255 on machines
4521 that can't shift by a constant. On those machines, we would first
4522 combine the shift with the AND to produce a variable-position
4523 extraction. Then the constant of 31 would be substituted in to produce
4524 a such a position. */
4525
4526 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4527 if (modewidth >= pos - len)
5f4f0e22 4528 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 4529 GET_MODE (x),
5f4f0e22
CH
4530 simplify_shift_const (NULL_RTX, ASHIFT,
4531 GET_MODE (x),
230d793d
RS
4532 XEXP (x, 0),
4533 modewidth - pos - len),
4534 modewidth - len);
4535
5f4f0e22
CH
4536 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4537 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4538 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
4539 GET_MODE (x),
4540 XEXP (x, 0), pos),
5f4f0e22 4541 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
4542 else
4543 /* Any other cases we can't handle. */
4544 return x;
4545
4546
4547 /* If we couldn't do this for some reason, return the original
4548 expression. */
4549 if (GET_CODE (tem) == CLOBBER)
4550 return x;
4551
4552 return tem;
4553}
4554\f
4555/* X is a SET which contains an assignment of one object into
4556 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4557 or certain SUBREGS). If possible, convert it into a series of
4558 logical operations.
4559
4560 We half-heartedly support variable positions, but do not at all
4561 support variable lengths. */
4562
4563static rtx
4564expand_field_assignment (x)
4565 rtx x;
4566{
4567 rtx inner;
4568 rtx pos; /* Always counts from low bit. */
4569 int len;
4570 rtx mask;
4571 enum machine_mode compute_mode;
4572
4573 /* Loop until we find something we can't simplify. */
4574 while (1)
4575 {
4576 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4577 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4578 {
4579 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4580 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4581 pos = const0_rtx;
4582 }
4583 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4584 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4585 {
4586 inner = XEXP (SET_DEST (x), 0);
4587 len = INTVAL (XEXP (SET_DEST (x), 1));
4588 pos = XEXP (SET_DEST (x), 2);
4589
4590 /* If the position is constant and spans the width of INNER,
4591 surround INNER with a USE to indicate this. */
4592 if (GET_CODE (pos) == CONST_INT
4593 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4594 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4595
4596#if BITS_BIG_ENDIAN
4597 if (GET_CODE (pos) == CONST_INT)
5f4f0e22
CH
4598 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4599 - INTVAL (pos));
230d793d
RS
4600 else if (GET_CODE (pos) == MINUS
4601 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4602 && (INTVAL (XEXP (pos, 1))
4603 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4604 /* If position is ADJUST - X, new position is X. */
4605 pos = XEXP (pos, 0);
4606 else
4607 pos = gen_binary (MINUS, GET_MODE (pos),
5f4f0e22
CH
4608 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4609 - len),
4610 pos);
230d793d
RS
4611#endif
4612 }
4613
4614 /* A SUBREG between two modes that occupy the same numbers of words
4615 can be done by moving the SUBREG to the source. */
4616 else if (GET_CODE (SET_DEST (x)) == SUBREG
4617 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4618 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4619 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4620 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4621 {
4622 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4623 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4624 SET_SRC (x)));
4625 continue;
4626 }
4627 else
4628 break;
4629
4630 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4631 inner = SUBREG_REG (inner);
4632
4633 compute_mode = GET_MODE (inner);
4634
4635 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5f4f0e22
CH
4636 if (len < HOST_BITS_PER_WIDE_INT)
4637 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
4638 else
4639 break;
4640
4641 /* Now compute the equivalent expression. Make a copy of INNER
4642 for the SET_DEST in case it is a MEM into which we will substitute;
4643 we don't want shared RTL in that case. */
4644 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4645 gen_binary (IOR, compute_mode,
4646 gen_binary (AND, compute_mode,
4647 gen_unary (NOT, compute_mode,
4648 gen_binary (ASHIFT,
4649 compute_mode,
4650 mask, pos)),
4651 inner),
4652 gen_binary (ASHIFT, compute_mode,
4653 gen_binary (AND, compute_mode,
4654 gen_lowpart_for_combine
4655 (compute_mode,
4656 SET_SRC (x)),
4657 mask),
4658 pos)));
4659 }
4660
4661 return x;
4662}
4663\f
8999a12e
RK
4664/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
4665 it is an RTX that represents a variable starting position; otherwise,
4666 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
4667
4668 INNER may be a USE. This will occur when we started with a bitfield
4669 that went outside the boundary of the object in memory, which is
4670 allowed on most machines. To isolate this case, we produce a USE
4671 whose mode is wide enough and surround the MEM with it. The only
4672 code that understands the USE is this routine. If it is not removed,
4673 it will cause the resulting insn not to match.
4674
4675 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4676 signed reference.
4677
4678 IN_DEST is non-zero if this is a reference in the destination of a
4679 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4680 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4681 be used.
4682
4683 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4684 ZERO_EXTRACT should be built even for bits starting at bit 0.
4685
4686 MODE is the desired mode of the result (if IN_DEST == 0). */
4687
4688static rtx
4689make_extraction (mode, inner, pos, pos_rtx, len,
4690 unsignedp, in_dest, in_compare)
4691 enum machine_mode mode;
4692 rtx inner;
4693 int pos;
4694 rtx pos_rtx;
4695 int len;
4696 int unsignedp;
4697 int in_dest, in_compare;
4698{
94b4b17a
RS
4699 /* This mode describes the size of the storage area
4700 to fetch the overall value from. Within that, we
4701 ignore the POS lowest bits, etc. */
230d793d
RS
4702 enum machine_mode is_mode = GET_MODE (inner);
4703 enum machine_mode inner_mode;
4704 enum machine_mode wanted_mem_mode = byte_mode;
4705 enum machine_mode pos_mode = word_mode;
4706 enum machine_mode extraction_mode = word_mode;
4707 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4708 int spans_byte = 0;
4709 rtx new = 0;
8999a12e 4710 rtx orig_pos_rtx = pos_rtx;
230d793d
RS
4711
4712 /* Get some information about INNER and get the innermost object. */
4713 if (GET_CODE (inner) == USE)
94b4b17a 4714 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
230d793d
RS
4715 /* We don't need to adjust the position because we set up the USE
4716 to pretend that it was a full-word object. */
4717 spans_byte = 1, inner = XEXP (inner, 0);
4718 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
94b4b17a
RS
4719 {
4720 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
4721 consider just the QI as the memory to extract from.
4722 The subreg adds or removes high bits; its mode is
4723 irrelevant to the meaning of this extraction,
4724 since POS and LEN count from the lsb. */
4725 if (GET_CODE (SUBREG_REG (inner)) == MEM)
4726 is_mode = GET_MODE (SUBREG_REG (inner));
4727 inner = SUBREG_REG (inner);
4728 }
230d793d
RS
4729
4730 inner_mode = GET_MODE (inner);
4731
4732 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
8999a12e 4733 pos = INTVAL (pos_rtx), pos_rtx = 0;
230d793d
RS
4734
4735 /* See if this can be done without an extraction. We never can if the
4736 width of the field is not the same as that of some integer mode. For
4737 registers, we can only avoid the extraction if the position is at the
4738 low-order bit and this is either not in the destination or we have the
4739 appropriate STRICT_LOW_PART operation available.
4740
4741 For MEM, we can avoid an extract if the field starts on an appropriate
4742 boundary and we can change the mode of the memory reference. However,
4743 we cannot directly access the MEM if we have a USE and the underlying
4744 MEM is not TMODE. This combination means that MEM was being used in a
4745 context where bits outside its mode were being referenced; that is only
4746 valid in bit-field insns. */
4747
4748 if (tmode != BLKmode
4749 && ! (spans_byte && inner_mode != tmode)
8999a12e 4750 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
230d793d 4751 && (! in_dest
df62f951
RK
4752 || (GET_CODE (inner) == REG
4753 && (movstrict_optab->handlers[(int) tmode].insn_code
4754 != CODE_FOR_nothing))))
8999a12e 4755 || (GET_CODE (inner) == MEM && pos_rtx == 0
dfbe1b2f
RK
4756 && (pos
4757 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4758 : BITS_PER_UNIT)) == 0
230d793d
RS
4759 /* We can't do this if we are widening INNER_MODE (it
4760 may not be aligned, for one thing). */
4761 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4762 && (inner_mode == tmode
4763 || (! mode_dependent_address_p (XEXP (inner, 0))
4764 && ! MEM_VOLATILE_P (inner))))))
4765 {
230d793d
RS
4766 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4767 field. If the original and current mode are the same, we need not
4768 adjust the offset. Otherwise, we do if bytes big endian.
4769
4770 If INNER is not a MEM, get a piece consisting of the just the field
df62f951 4771 of interest (in this case POS must be 0). */
230d793d
RS
4772
4773 if (GET_CODE (inner) == MEM)
4774 {
94b4b17a
RS
4775 int offset;
4776 /* POS counts from lsb, but make OFFSET count in memory order. */
4777 if (BYTES_BIG_ENDIAN)
4778 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
4779 else
4780 offset = pos / BITS_PER_UNIT;
230d793d
RS
4781
4782 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4783 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4784 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4785 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4786 }
df62f951 4787 else if (GET_CODE (inner) == REG)
77fa0940
RK
4788 /* We can't call gen_lowpart_for_combine here since we always want
4789 a SUBREG and it would sometimes return a new hard register. */
4790 new = gen_rtx (SUBREG, tmode, inner,
4791 (WORDS_BIG_ENDIAN
3e3ea975
RS
4792 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
4793 ? ((GET_MODE_SIZE (inner_mode) - GET_MODE_SIZE (tmode))
6ba17bb0
RS
4794 / UNITS_PER_WORD)
4795 : 0));
230d793d 4796 else
d0ab8cd3 4797 new = force_to_mode (inner, tmode, len, NULL_RTX);
230d793d
RS
4798
4799 /* If this extraction is going into the destination of a SET,
4800 make a STRICT_LOW_PART unless we made a MEM. */
4801
4802 if (in_dest)
4803 return (GET_CODE (new) == MEM ? new
77fa0940
RK
4804 : (GET_CODE (new) != SUBREG
4805 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4806 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
230d793d
RS
4807
4808 /* Otherwise, sign- or zero-extend unless we already are in the
4809 proper mode. */
4810
4811 return (mode == tmode ? new
4812 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4813 mode, new));
4814 }
4815
cc471082
RS
4816 /* Unless this is a COMPARE or we have a funny memory reference,
4817 don't do anything with zero-extending field extracts starting at
4818 the low-order bit since they are simple AND operations. */
8999a12e
RK
4819 if (pos_rtx == 0 && pos == 0 && ! in_dest
4820 && ! in_compare && ! spans_byte && unsignedp)
230d793d
RS
4821 return 0;
4822
4823 /* Get the mode to use should INNER be a MEM, the mode for the position,
4824 and the mode for the result. */
4825#ifdef HAVE_insv
4826 if (in_dest)
4827 {
4828 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4829 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4830 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4831 }
4832#endif
4833
4834#ifdef HAVE_extzv
4835 if (! in_dest && unsignedp)
4836 {
4837 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4838 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4839 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4840 }
4841#endif
4842
4843#ifdef HAVE_extv
4844 if (! in_dest && ! unsignedp)
4845 {
4846 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4847 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4848 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4849 }
4850#endif
4851
4852 /* Never narrow an object, since that might not be safe. */
4853
4854 if (mode != VOIDmode
4855 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4856 extraction_mode = mode;
4857
4858 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4859 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4860 pos_mode = GET_MODE (pos_rtx);
4861
4862 /* If this is not from memory or we have to change the mode of memory and
4863 cannot, the desired mode is EXTRACTION_MODE. */
4864 if (GET_CODE (inner) != MEM
4865 || (inner_mode != wanted_mem_mode
4866 && (mode_dependent_address_p (XEXP (inner, 0))
4867 || MEM_VOLATILE_P (inner))))
4868 wanted_mem_mode = extraction_mode;
4869
4870#if BITS_BIG_ENDIAN
4871 /* If position is constant, compute new position. Otherwise, build
4872 subtraction. */
8999a12e 4873 if (pos_rtx == 0)
230d793d
RS
4874 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4875 - len - pos);
4876 else
4877 pos_rtx
4878 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5f4f0e22
CH
4879 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4880 GET_MODE_BITSIZE (wanted_mem_mode))
4881 - len),
4882 pos_rtx);
230d793d
RS
4883#endif
4884
4885 /* If INNER has a wider mode, make it smaller. If this is a constant
4886 extract, try to adjust the byte to point to the byte containing
4887 the value. */
4888 if (wanted_mem_mode != VOIDmode
4889 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4890 && ((GET_CODE (inner) == MEM
4891 && (inner_mode == wanted_mem_mode
4892 || (! mode_dependent_address_p (XEXP (inner, 0))
4893 && ! MEM_VOLATILE_P (inner))))))
4894 {
4895 int offset = 0;
4896
4897 /* The computations below will be correct if the machine is big
4898 endian in both bits and bytes or little endian in bits and bytes.
4899 If it is mixed, we must adjust. */
4900
230d793d
RS
4901 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4902 adjust OFFSET to compensate. */
4903#if BYTES_BIG_ENDIAN
4904 if (! spans_byte
4905 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4906 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4907#endif
4908
4909 /* If this is a constant position, we can move to the desired byte. */
8999a12e 4910 if (pos_rtx == 0)
230d793d
RS
4911 {
4912 offset += pos / BITS_PER_UNIT;
4913 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4914 }
4915
c6b3f1f2
JW
4916#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4917 if (! spans_byte && is_mode != wanted_mem_mode)
4918 offset = (GET_MODE_SIZE (is_mode)
4919 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4920#endif
4921
230d793d
RS
4922 if (offset != 0 || inner_mode != wanted_mem_mode)
4923 {
4924 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4925 plus_constant (XEXP (inner, 0), offset));
4926 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4927 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4928 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4929 inner = newmem;
4930 }
4931 }
4932
4933 /* If INNER is not memory, we can always get it into the proper mode. */
4934 else if (GET_CODE (inner) != MEM)
d0ab8cd3
RK
4935 inner = force_to_mode (inner, extraction_mode,
4936 (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
4937 : len + pos),
4938 NULL_RTX);
230d793d
RS
4939
4940 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4941 have to zero extend. Otherwise, we can just use a SUBREG. */
8999a12e 4942 if (pos_rtx != 0
230d793d
RS
4943 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4944 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
8999a12e 4945 else if (pos_rtx != 0
230d793d
RS
4946 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4947 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4948
8999a12e
RK
4949 /* Make POS_RTX unless we already have it and it is correct. If we don't
4950 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
4951 be a CONST_INT. */
4952 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
4953 pos_rtx = orig_pos_rtx;
4954
4955 else if (pos_rtx == 0)
5f4f0e22 4956 pos_rtx = GEN_INT (pos);
230d793d
RS
4957
4958 /* Make the required operation. See if we can use existing rtx. */
4959 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5f4f0e22 4960 extraction_mode, inner, GEN_INT (len), pos_rtx);
230d793d
RS
4961 if (! in_dest)
4962 new = gen_lowpart_for_combine (mode, new);
4963
4964 return new;
4965}
4966\f
4967/* Look at the expression rooted at X. Look for expressions
4968 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4969 Form these expressions.
4970
4971 Return the new rtx, usually just X.
4972
4973 Also, for machines like the Vax that don't have logical shift insns,
4974 try to convert logical to arithmetic shift operations in cases where
4975 they are equivalent. This undoes the canonicalizations to logical
4976 shifts done elsewhere.
4977
4978 We try, as much as possible, to re-use rtl expressions to save memory.
4979
4980 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
4981 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
4982 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
4983 or a COMPARE against zero, it is COMPARE. */
4984
4985static rtx
4986make_compound_operation (x, in_code)
4987 rtx x;
4988 enum rtx_code in_code;
4989{
4990 enum rtx_code code = GET_CODE (x);
4991 enum machine_mode mode = GET_MODE (x);
4992 int mode_width = GET_MODE_BITSIZE (mode);
4993 enum rtx_code next_code;
d0ab8cd3 4994 int i, count;
230d793d 4995 rtx new = 0;
280f58ba 4996 rtx tem;
230d793d
RS
4997 char *fmt;
4998
4999 /* Select the code to be used in recursive calls. Once we are inside an
5000 address, we stay there. If we have a comparison, set to COMPARE,
5001 but once inside, go back to our default of SET. */
5002
42495ca0 5003 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
230d793d
RS
5004 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5005 && XEXP (x, 1) == const0_rtx) ? COMPARE
5006 : in_code == COMPARE ? SET : in_code);
5007
5008 /* Process depending on the code of this operation. If NEW is set
5009 non-zero, it will be returned. */
5010
5011 switch (code)
5012 {
5013 case ASHIFT:
5014 case LSHIFT:
5015 /* Convert shifts by constants into multiplications if inside
5016 an address. */
5017 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 5018 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 5019 && INTVAL (XEXP (x, 1)) >= 0)
280f58ba
RK
5020 {
5021 new = make_compound_operation (XEXP (x, 0), next_code);
5022 new = gen_rtx_combine (MULT, mode, new,
5023 GEN_INT ((HOST_WIDE_INT) 1
5024 << INTVAL (XEXP (x, 1))));
5025 }
230d793d
RS
5026 break;
5027
5028 case AND:
5029 /* If the second operand is not a constant, we can't do anything
5030 with it. */
5031 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5032 break;
5033
5034 /* If the constant is a power of two minus one and the first operand
5035 is a logical right shift, make an extraction. */
5036 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5037 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5038 {
5039 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5040 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5041 0, in_code == COMPARE);
5042 }
dfbe1b2f 5043
230d793d
RS
5044 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5045 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5046 && subreg_lowpart_p (XEXP (x, 0))
5047 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5048 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5049 {
5050 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5051 next_code);
5052 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5053 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5054 0, in_code == COMPARE);
5055 }
a7c99304
RK
5056
5057 /* If we are have (and (rotate X C) M) and C is larger than the number
5058 of bits in M, this is an extraction. */
5059
5060 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5061 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5062 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5063 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
5064 {
5065 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5066 new = make_extraction (mode, new,
5067 (GET_MODE_BITSIZE (mode)
5068 - INTVAL (XEXP (XEXP (x, 0), 1))),
5069 NULL_RTX, i, 1, 0, in_code == COMPARE);
5070 }
a7c99304
RK
5071
5072 /* On machines without logical shifts, if the operand of the AND is
230d793d
RS
5073 a logical shift and our mask turns off all the propagated sign
5074 bits, we can replace the logical shift with an arithmetic shift. */
d0ab8cd3
RK
5075 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5076 && (lshr_optab->handlers[(int) mode].insn_code
5077 == CODE_FOR_nothing)
230d793d
RS
5078 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5079 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5080 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5f4f0e22
CH
5081 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5082 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 5083 {
5f4f0e22 5084 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
5085
5086 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5087 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5088 SUBST (XEXP (x, 0),
280f58ba
RK
5089 gen_rtx_combine (ASHIFTRT, mode,
5090 make_compound_operation (XEXP (XEXP (x, 0), 0),
5091 next_code),
230d793d
RS
5092 XEXP (XEXP (x, 0), 1)));
5093 }
5094
5095 /* If the constant is one less than a power of two, this might be
5096 representable by an extraction even if no shift is present.
5097 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5098 we are in a COMPARE. */
5099 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5100 new = make_extraction (mode,
5101 make_compound_operation (XEXP (x, 0),
5102 next_code),
5103 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
230d793d
RS
5104
5105 /* If we are in a comparison and this is an AND with a power of two,
5106 convert this into the appropriate bit extract. */
5107 else if (in_code == COMPARE
5108 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
280f58ba
RK
5109 new = make_extraction (mode,
5110 make_compound_operation (XEXP (x, 0),
5111 next_code),
5112 i, NULL_RTX, 1, 1, 0, 1);
230d793d
RS
5113
5114 break;
5115
5116 case LSHIFTRT:
5117 /* If the sign bit is known to be zero, replace this with an
5118 arithmetic shift. */
d0ab8cd3
RK
5119 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5120 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5f4f0e22 5121 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 5122 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
230d793d 5123 {
280f58ba
RK
5124 new = gen_rtx_combine (ASHIFTRT, mode,
5125 make_compound_operation (XEXP (x, 0),
5126 next_code),
5127 XEXP (x, 1));
230d793d
RS
5128 break;
5129 }
5130
5131 /* ... fall through ... */
5132
5133 case ASHIFTRT:
5134 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5135 this is a SIGN_EXTRACT. */
5136 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5137 && GET_CODE (XEXP (x, 0)) == ASHIFT
5138 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5139 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
5140 {
5141 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5142 new = make_extraction (mode, new,
5143 (INTVAL (XEXP (x, 1))
5144 - INTVAL (XEXP (XEXP (x, 0), 1))),
5145 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5146 code == LSHIFTRT, 0, in_code == COMPARE);
5147 }
d0ab8cd3
RK
5148
5149 /* Similarly if we have (ashifrt (OP (ashift foo C1) C3) C2). In these
5150 cases, we are better off returning a SIGN_EXTEND of the operation. */
5151
5152 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5153 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
5154 || GET_CODE (XEXP (x, 0)) == XOR
5155 || GET_CODE (XEXP (x, 0)) == PLUS)
5156 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5157 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
d0ab8cd3
RK
5158 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
5159 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
0a5cbff6
RK
5160 && 0 == (INTVAL (XEXP (XEXP (x, 0), 1))
5161 & (((HOST_WIDE_INT) 1
5162 << (MIN (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)),
5163 INTVAL (XEXP (x, 1)))
5164 - 1)))))
d0ab8cd3 5165 {
0a5cbff6
RK
5166 rtx c1 = XEXP (XEXP (XEXP (x, 0), 0), 1);
5167 rtx c2 = XEXP (x, 1);
5168 rtx c3 = XEXP (XEXP (x, 0), 1);
5169 HOST_WIDE_INT newop1;
5170 rtx inner = XEXP (XEXP (XEXP (x, 0), 0), 0);
5171
5172 /* If C1 > C2, INNER needs to have the shift performed on it
5173 for C1-C2 bits. */
5174 if (INTVAL (c1) > INTVAL (c2))
5175 {
5176 inner = gen_binary (ASHIFT, mode, inner,
5177 GEN_INT (INTVAL (c1) - INTVAL (c2)));
5178 c1 = c2;
5179 }
d0ab8cd3 5180
0a5cbff6
RK
5181 newop1 = INTVAL (c3) >> INTVAL (c1);
5182 new = make_compound_operation (inner,
5183 GET_CODE (XEXP (x, 0)) == PLUS
5184 ? MEM : GET_CODE (XEXP (x, 0)));
d0ab8cd3 5185 new = make_extraction (mode,
280f58ba 5186 gen_binary (GET_CODE (XEXP (x, 0)), mode, new,
d0ab8cd3 5187 GEN_INT (newop1)),
0a5cbff6
RK
5188 INTVAL (c2) - INTVAL (c1),
5189 NULL_RTX, mode_width - INTVAL (c1),
d0ab8cd3
RK
5190 code == LSHIFTRT, 0, in_code == COMPARE);
5191 }
5192
d0dcc580
RK
5193 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
5194 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5195 && GET_CODE (XEXP (x, 0)) == NEG
5196 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5197 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5198 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
280f58ba
RK
5199 {
5200 new = make_compound_operation (XEXP (XEXP (XEXP (x, 0), 0), 0),
5201 next_code);
5202 new = make_extraction (mode,
5203 gen_unary (GET_CODE (XEXP (x, 0)), mode,
5204 new, 0),
5205 (INTVAL (XEXP (x, 1))
5206 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5207 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5208 code == LSHIFTRT, 0, in_code == COMPARE);
5209 }
230d793d 5210 break;
280f58ba
RK
5211
5212 case SUBREG:
5213 /* Call ourselves recursively on the inner expression. If we are
5214 narrowing the object and it has a different RTL code from
5215 what it originally did, do this SUBREG as a force_to_mode. */
5216
0a5cbff6 5217 tem = make_compound_operation (SUBREG_REG (x), in_code);
280f58ba
RK
5218 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5219 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5220 && subreg_lowpart_p (x))
0a5cbff6
RK
5221 {
5222 rtx newer = force_to_mode (tem, mode,
5223 GET_MODE_BITSIZE (mode), NULL_RTX);
5224
5225 /* If we have something other than a SUBREG, we might have
5226 done an expansion, so rerun outselves. */
5227 if (GET_CODE (newer) != SUBREG)
5228 newer = make_compound_operation (newer, in_code);
5229
5230 return newer;
5231 }
230d793d
RS
5232 }
5233
5234 if (new)
5235 {
df62f951 5236 x = gen_lowpart_for_combine (mode, new);
230d793d
RS
5237 code = GET_CODE (x);
5238 }
5239
5240 /* Now recursively process each operand of this operation. */
5241 fmt = GET_RTX_FORMAT (code);
5242 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5243 if (fmt[i] == 'e')
5244 {
5245 new = make_compound_operation (XEXP (x, i), next_code);
5246 SUBST (XEXP (x, i), new);
5247 }
5248
5249 return x;
5250}
5251\f
5252/* Given M see if it is a value that would select a field of bits
5253 within an item, but not the entire word. Return -1 if not.
5254 Otherwise, return the starting position of the field, where 0 is the
5255 low-order bit.
5256
5257 *PLEN is set to the length of the field. */
5258
5259static int
5260get_pos_from_mask (m, plen)
5f4f0e22 5261 unsigned HOST_WIDE_INT m;
230d793d
RS
5262 int *plen;
5263{
5264 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5265 int pos = exact_log2 (m & - m);
5266
5267 if (pos < 0)
5268 return -1;
5269
5270 /* Now shift off the low-order zero bits and see if we have a power of
5271 two minus 1. */
5272 *plen = exact_log2 ((m >> pos) + 1);
5273
5274 if (*plen <= 0)
5275 return -1;
5276
5277 return pos;
5278}
5279\f
dfbe1b2f
RK
5280/* Rewrite X so that it is an expression in MODE. We only care about the
5281 low-order BITS bits so we can ignore AND operations that just clear
5282 higher-order bits.
5283
5284 Also, if REG is non-zero and X is a register equal in value to REG,
5285 replace X with REG. */
5286
5287static rtx
5288force_to_mode (x, mode, bits, reg)
5289 rtx x;
5290 enum machine_mode mode;
5291 int bits;
5292 rtx reg;
5293{
5294 enum rtx_code code = GET_CODE (x);
d0ab8cd3 5295 enum machine_mode op_mode = mode;
dfbe1b2f
RK
5296
5297 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
5298 just get X in the proper mode. */
5299
5300 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5301 || bits > GET_MODE_BITSIZE (mode))
5302 return gen_lowpart_for_combine (mode, x);
5303
5304 switch (code)
5305 {
5306 case SIGN_EXTEND:
5307 case ZERO_EXTEND:
5308 case ZERO_EXTRACT:
5309 case SIGN_EXTRACT:
5310 x = expand_compound_operation (x);
5311 if (GET_CODE (x) != code)
5312 return force_to_mode (x, mode, bits, reg);
5313 break;
5314
5315 case REG:
5316 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5317 || rtx_equal_p (reg, get_last_value (x))))
5318 x = reg;
5319 break;
5320
5321 case CONST_INT:
5f4f0e22
CH
5322 if (bits < HOST_BITS_PER_WIDE_INT)
5323 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
dfbe1b2f
RK
5324 return x;
5325
5326 case SUBREG:
5327 /* Ignore low-order SUBREGs. */
5328 if (subreg_lowpart_p (x))
5329 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
5330 break;
5331
5332 case AND:
5333 /* If this is an AND with a constant. Otherwise, we fall through to
5334 do the general binary case. */
5335
5336 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5337 {
5f4f0e22 5338 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
dfbe1b2f
RK
5339 int len = exact_log2 (mask + 1);
5340 rtx op = XEXP (x, 0);
5341
5342 /* If this is masking some low-order bits, we may be able to
5343 impose a stricter constraint on what bits of the operand are
5344 required. */
5345
5346 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
5347 reg);
5348
5f4f0e22
CH
5349 if (bits < HOST_BITS_PER_WIDE_INT)
5350 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
dfbe1b2f 5351
d0ab8cd3
RK
5352 /* If we have no AND in MODE, use the original mode for the
5353 operation. */
5354
5355 if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5356 op_mode = GET_MODE (x);
5357
5358 x = simplify_and_const_int (x, op_mode, op, mask);
dfbe1b2f
RK
5359
5360 /* If X is still an AND, see if it is an AND with a mask that
5361 is just some low-order bits. If so, and it is BITS wide (it
5362 can't be wider), we don't need it. */
5363
5364 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22
CH
5365 && bits < HOST_BITS_PER_WIDE_INT
5366 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
dfbe1b2f 5367 x = XEXP (x, 0);
d0ab8cd3
RK
5368
5369 break;
dfbe1b2f
RK
5370 }
5371
5372 /* ... fall through ... */
5373
5374 case PLUS:
5375 case MINUS:
5376 case MULT:
5377 case IOR:
5378 case XOR:
5379 /* For most binary operations, just propagate into the operation and
d0ab8cd3
RK
5380 change the mode if we have an operation of that mode. */
5381
5382 if ((code == PLUS
5383 && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5384 || (code == MINUS
5385 && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5386 || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
5387 == CODE_FOR_nothing))
53e33d95
RK
5388 || (code == AND
5389 && and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
d0ab8cd3
RK
5390 || (code == IOR
5391 && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5392 || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
5393 == CODE_FOR_nothing)))
5394 op_mode = GET_MODE (x);
5395
5396 x = gen_binary (code, op_mode,
5397 gen_lowpart_for_combine (op_mode,
5398 force_to_mode (XEXP (x, 0),
5399 mode, bits,
5400 reg)),
5401 gen_lowpart_for_combine (op_mode,
5402 force_to_mode (XEXP (x, 1),
5403 mode, bits,
5404 reg)));
5405 break;
dfbe1b2f
RK
5406
5407 case ASHIFT:
5408 case LSHIFT:
5409 /* For left shifts, do the same, but just for the first operand.
f6785026
RK
5410 However, we cannot do anything with shifts where we cannot
5411 guarantee that the counts are smaller than the size of the mode
5412 because such a count will have a different meaning in a
5413 wider mode.
5414
5415 If we can narrow the shift and know the count, we need even fewer
5416 bits of the first operand. */
5417
5418 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
5419 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
5420 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
5421 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
adb7a1cb 5422 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
f6785026
RK
5423 break;
5424
dfbe1b2f
RK
5425 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
5426 bits -= INTVAL (XEXP (x, 1));
5427
d0ab8cd3
RK
5428 if ((code == ASHIFT
5429 && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5430 || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
5431 == CODE_FOR_nothing)))
5432 op_mode = GET_MODE (x);
5433
5434 x = gen_binary (code, op_mode,
5435 gen_lowpart_for_combine (op_mode,
5436 force_to_mode (XEXP (x, 0),
5437 mode, bits,
5438 reg)),
5439 XEXP (x, 1));
5440 break;
dfbe1b2f
RK
5441
5442 case LSHIFTRT:
5443 /* Here we can only do something if the shift count is a constant and
f6785026
RK
5444 the count plus BITS is no larger than the width of MODE. In that
5445 case, we can do the shift in MODE. */
dfbe1b2f
RK
5446
5447 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5448 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
d0ab8cd3
RK
5449 {
5450 rtx inner = force_to_mode (XEXP (x, 0), mode,
5451 bits + INTVAL (XEXP (x, 1)), reg);
5452
5453 if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5454 op_mode = GET_MODE (x);
5455
5456 x = gen_binary (LSHIFTRT, op_mode,
5457 gen_lowpart_for_combine (op_mode, inner),
5458 XEXP (x, 1));
5459 }
5460 break;
5461
5462 case ASHIFTRT:
5463 /* If this is a sign-extension operation that just affects bits
5464 we don't care about, remove it. */
5465
5466 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5467 && INTVAL (XEXP (x, 1)) >= 0
5468 && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
5469 && GET_CODE (XEXP (x, 0)) == ASHIFT
5470 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5471 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5472 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
dfbe1b2f
RK
5473 break;
5474
5475 case NEG:
5476 case NOT:
d0ab8cd3
RK
5477 if ((code == NEG
5478 && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5479 || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
5480 == CODE_FOR_nothing)))
5481 op_mode = GET_MODE (x);
5482
dfbe1b2f 5483 /* Handle these similarly to the way we handle most binary operations. */
d0ab8cd3
RK
5484 x = gen_unary (code, op_mode,
5485 gen_lowpart_for_combine (op_mode,
5486 force_to_mode (XEXP (x, 0), mode,
5487 bits, reg)));
5488 break;
5489
5490 case IF_THEN_ELSE:
5491 /* We have no way of knowing if the IF_THEN_ELSE can itself be
5492 written in a narrower mode. We play it safe and do not do so. */
5493
5494 SUBST (XEXP (x, 1),
5495 gen_lowpart_for_combine (GET_MODE (x),
5496 force_to_mode (XEXP (x, 1), mode,
5497 bits, reg)));
5498 SUBST (XEXP (x, 2),
5499 gen_lowpart_for_combine (GET_MODE (x),
5500 force_to_mode (XEXP (x, 2), mode,
5501 bits, reg)));
5502 break;
dfbe1b2f
RK
5503 }
5504
d0ab8cd3 5505 /* Ensure we return a value of the proper mode. */
dfbe1b2f
RK
5506 return gen_lowpart_for_combine (mode, x);
5507}
5508\f
1a26b032
RK
5509/* Return the value of expression X given the fact that condition COND
5510 is known to be true when applied to REG as its first operand and VAL
5511 as its second. X is known to not be shared and so can be modified in
5512 place.
5513
5514 We only handle the simplest cases, and specifically those cases that
5515 arise with IF_THEN_ELSE expressions. */
5516
5517static rtx
5518known_cond (x, cond, reg, val)
5519 rtx x;
5520 enum rtx_code cond;
5521 rtx reg, val;
5522{
5523 enum rtx_code code = GET_CODE (x);
5524 rtx new, temp;
5525 char *fmt;
5526 int i, j;
5527
5528 if (side_effects_p (x))
5529 return x;
5530
5531 if (cond == EQ && rtx_equal_p (x, reg))
5532 return val;
5533
5534 /* If X is (abs REG) and we know something about REG's relationship
5535 with zero, we may be able to simplify this. */
5536
5537 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
5538 switch (cond)
5539 {
5540 case GE: case GT: case EQ:
5541 return XEXP (x, 0);
5542 case LT: case LE:
5543 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
5544 }
5545
5546 /* The only other cases we handle are MIN, MAX, and comparisons if the
5547 operands are the same as REG and VAL. */
5548
5549 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
5550 {
5551 if (rtx_equal_p (XEXP (x, 0), val))
5552 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
5553
5554 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
5555 {
5556 if (GET_RTX_CLASS (code) == '<')
5557 return (comparison_dominates_p (cond, code) ? const_true_rtx
5558 : (comparison_dominates_p (cond,
5559 reverse_condition (code))
5560 ? const0_rtx : x));
5561
5562 else if (code == SMAX || code == SMIN
5563 || code == UMIN || code == UMAX)
5564 {
5565 int unsignedp = (code == UMIN || code == UMAX);
5566
5567 if (code == SMAX || code == UMAX)
5568 cond = reverse_condition (cond);
5569
5570 switch (cond)
5571 {
5572 case GE: case GT:
5573 return unsignedp ? x : XEXP (x, 1);
5574 case LE: case LT:
5575 return unsignedp ? x : XEXP (x, 0);
5576 case GEU: case GTU:
5577 return unsignedp ? XEXP (x, 1) : x;
5578 case LEU: case LTU:
5579 return unsignedp ? XEXP (x, 0) : x;
5580 }
5581 }
5582 }
5583 }
5584
5585 fmt = GET_RTX_FORMAT (code);
5586 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5587 {
5588 if (fmt[i] == 'e')
5589 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
5590 else if (fmt[i] == 'E')
5591 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5592 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
5593 cond, reg, val));
5594 }
5595
5596 return x;
5597}
5598\f
230d793d
RS
5599/* See if X, a SET operation, can be rewritten as a bit-field assignment.
5600 Return that assignment if so.
5601
5602 We only handle the most common cases. */
5603
5604static rtx
5605make_field_assignment (x)
5606 rtx x;
5607{
5608 rtx dest = SET_DEST (x);
5609 rtx src = SET_SRC (x);
dfbe1b2f
RK
5610 rtx ourdest;
5611 rtx assign;
5f4f0e22
CH
5612 HOST_WIDE_INT c1;
5613 int pos, len;
dfbe1b2f
RK
5614 rtx other;
5615 enum machine_mode mode;
230d793d
RS
5616
5617 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
5618 a clear of a one-bit field. We will have changed it to
5619 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
5620 for a SUBREG. */
5621
5622 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
5623 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
5624 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
dfbe1b2f
RK
5625 && (rtx_equal_p (dest, XEXP (src, 1))
5626 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5627 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 5628 {
8999a12e 5629 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 5630 1, 1, 1, 0);
dfbe1b2f 5631 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
230d793d
RS
5632 }
5633
5634 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
5635 && subreg_lowpart_p (XEXP (src, 0))
5636 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
5637 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
5638 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
5639 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
dfbe1b2f
RK
5640 && (rtx_equal_p (dest, XEXP (src, 1))
5641 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5642 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 5643 {
8999a12e 5644 assign = make_extraction (VOIDmode, dest, 0,
230d793d
RS
5645 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
5646 1, 1, 1, 0);
dfbe1b2f 5647 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
230d793d
RS
5648 }
5649
5650 /* If SRC is (ior (ashift (const_int 1) POS DEST)), this is a set of a
5651 one-bit field. */
5652 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
5653 && XEXP (XEXP (src, 0), 0) == const1_rtx
dfbe1b2f
RK
5654 && (rtx_equal_p (dest, XEXP (src, 1))
5655 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5656 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 5657 {
8999a12e 5658 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 5659 1, 1, 1, 0);
dfbe1b2f 5660 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
230d793d
RS
5661 }
5662
dfbe1b2f
RK
5663 /* The other case we handle is assignments into a constant-position
5664 field. They look like (ior (and DEST C1) OTHER). If C1 represents
5665 a mask that has all one bits except for a group of zero bits and
5666 OTHER is known to have zeros where C1 has ones, this is such an
5667 assignment. Compute the position and length from C1. Shift OTHER
5668 to the appropriate position, force it to the required mode, and
5669 make the extraction. Check for the AND in both operands. */
5670
5671 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
5672 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
5673 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
5674 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
5675 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
5676 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
5677 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
5678 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
5679 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
5680 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
5681 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
5682 dest)))
5683 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
5684 else
5685 return x;
230d793d 5686
dfbe1b2f
RK
5687 pos = get_pos_from_mask (~c1, &len);
5688 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
ac49a949 5689 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
951553af 5690 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
dfbe1b2f 5691 return x;
230d793d 5692
5f4f0e22 5693 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
230d793d 5694
dfbe1b2f
RK
5695 /* The mode to use for the source is the mode of the assignment, or of
5696 what is inside a possible STRICT_LOW_PART. */
5697 mode = (GET_CODE (assign) == STRICT_LOW_PART
5698 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 5699
dfbe1b2f
RK
5700 /* Shift OTHER right POS places and make it the source, restricting it
5701 to the proper length and mode. */
230d793d 5702
5f4f0e22
CH
5703 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
5704 GET_MODE (src), other, pos),
dfbe1b2f 5705 mode, len, dest);
230d793d 5706
dfbe1b2f 5707 return gen_rtx_combine (SET, VOIDmode, assign, src);
230d793d
RS
5708}
5709\f
5710/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5711 if so. */
5712
5713static rtx
5714apply_distributive_law (x)
5715 rtx x;
5716{
5717 enum rtx_code code = GET_CODE (x);
5718 rtx lhs, rhs, other;
5719 rtx tem;
5720 enum rtx_code inner_code;
5721
d8a8a4da
RS
5722 /* Distributivity is not true for floating point.
5723 It can change the value. So don't do it.
5724 -- rms and moshier@world.std.com. */
5725 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5726 return x;
5727
230d793d
RS
5728 /* The outer operation can only be one of the following: */
5729 if (code != IOR && code != AND && code != XOR
5730 && code != PLUS && code != MINUS)
5731 return x;
5732
5733 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
5734
dfbe1b2f 5735 /* If either operand is a primitive we can't do anything, so get out fast. */
230d793d 5736 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
dfbe1b2f 5737 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
230d793d
RS
5738 return x;
5739
5740 lhs = expand_compound_operation (lhs);
5741 rhs = expand_compound_operation (rhs);
5742 inner_code = GET_CODE (lhs);
5743 if (inner_code != GET_CODE (rhs))
5744 return x;
5745
5746 /* See if the inner and outer operations distribute. */
5747 switch (inner_code)
5748 {
5749 case LSHIFTRT:
5750 case ASHIFTRT:
5751 case AND:
5752 case IOR:
5753 /* These all distribute except over PLUS. */
5754 if (code == PLUS || code == MINUS)
5755 return x;
5756 break;
5757
5758 case MULT:
5759 if (code != PLUS && code != MINUS)
5760 return x;
5761 break;
5762
5763 case ASHIFT:
5764 case LSHIFT:
5765 /* These are also multiplies, so they distribute over everything. */
5766 break;
5767
5768 case SUBREG:
dfbe1b2f
RK
5769 /* Non-paradoxical SUBREGs distributes over all operations, provided
5770 the inner modes and word numbers are the same, this is an extraction
2b4bd1bc
JW
5771 of a low-order part, we don't convert an fp operation to int or
5772 vice versa, and we would not be converting a single-word
dfbe1b2f 5773 operation into a multi-word operation. The latter test is not
2b4bd1bc 5774 required, but it prevents generating unneeded multi-word operations.
dfbe1b2f
RK
5775 Some of the previous tests are redundant given the latter test, but
5776 are retained because they are required for correctness.
5777
5778 We produce the result slightly differently in this case. */
5779
5780 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
5781 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
5782 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
5783 || (GET_MODE_CLASS (GET_MODE (lhs))
5784 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f
RK
5785 || (GET_MODE_SIZE (GET_MODE (lhs))
5786 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
5787 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
230d793d
RS
5788 return x;
5789
5790 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
5791 SUBREG_REG (lhs), SUBREG_REG (rhs));
5792 return gen_lowpart_for_combine (GET_MODE (x), tem);
5793
5794 default:
5795 return x;
5796 }
5797
5798 /* Set LHS and RHS to the inner operands (A and B in the example
5799 above) and set OTHER to the common operand (C in the example).
5800 These is only one way to do this unless the inner operation is
5801 commutative. */
5802 if (GET_RTX_CLASS (inner_code) == 'c'
5803 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
5804 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
5805 else if (GET_RTX_CLASS (inner_code) == 'c'
5806 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
5807 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
5808 else if (GET_RTX_CLASS (inner_code) == 'c'
5809 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
5810 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
5811 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
5812 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
5813 else
5814 return x;
5815
5816 /* Form the new inner operation, seeing if it simplifies first. */
5817 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
5818
5819 /* There is one exception to the general way of distributing:
5820 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
5821 if (code == XOR && inner_code == IOR)
5822 {
5823 inner_code = AND;
5824 other = gen_unary (NOT, GET_MODE (x), other);
5825 }
5826
5827 /* We may be able to continuing distributing the result, so call
5828 ourselves recursively on the inner operation before forming the
5829 outer operation, which we return. */
5830 return gen_binary (inner_code, GET_MODE (x),
5831 apply_distributive_law (tem), other);
5832}
5833\f
5834/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5835 in MODE.
5836
5837 Return an equivalent form, if different from X. Otherwise, return X. If
5838 X is zero, we are to always construct the equivalent form. */
5839
5840static rtx
5841simplify_and_const_int (x, mode, varop, constop)
5842 rtx x;
5843 enum machine_mode mode;
5844 rtx varop;
5f4f0e22 5845 unsigned HOST_WIDE_INT constop;
230d793d
RS
5846{
5847 register enum machine_mode tmode;
5848 register rtx temp;
951553af 5849 unsigned HOST_WIDE_INT nonzero;
230d793d
RS
5850
5851 /* There is a large class of optimizations based on the principle that
5852 some operations produce results where certain bits are known to be zero,
5853 and hence are not significant to the AND. For example, if we have just
5854 done a left shift of one bit, the low-order bit is known to be zero and
5855 hence an AND with a mask of ~1 would not do anything.
5856
5857 At the end of the following loop, we set:
5858
5859 VAROP to be the item to be AND'ed with;
5860 CONSTOP to the constant value to AND it with. */
5861
5862 while (1)
5863 {
5f4f0e22
CH
5864 /* If we ever encounter a mode wider than the host machine's widest
5865 integer size, we can't compute the masks accurately, so give up. */
5866 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
230d793d
RS
5867 break;
5868
5869 /* Unless one of the cases below does a `continue',
5870 a `break' will be executed to exit the loop. */
5871
5872 switch (GET_CODE (varop))
5873 {
5874 case CLOBBER:
5875 /* If VAROP is a (clobber (const_int)), return it since we know
5876 we are generating something that won't match. */
5877 return varop;
5878
5879#if ! BITS_BIG_ENDIAN
5880 case USE:
5881 /* VAROP is a (use (mem ..)) that was made from a bit-field
5882 extraction that spanned the boundary of the MEM. If we are
5883 now masking so it is within that boundary, we don't need the
5884 USE any more. */
5885 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
5886 {
5887 varop = XEXP (varop, 0);
5888 continue;
5889 }
5890 break;
5891#endif
5892
5893 case SUBREG:
5894 if (subreg_lowpart_p (varop)
5895 /* We can ignore the effect this SUBREG if it narrows the mode
457816e2 5896 or, on machines where byte operations extend, if the
230d793d
RS
5897 constant masks to zero all the bits the mode doesn't have. */
5898 && ((GET_MODE_SIZE (GET_MODE (varop))
5899 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
c6dc70d6 5900#ifdef BYTE_LOADS_EXTEND
230d793d
RS
5901 || (0 == (constop
5902 & GET_MODE_MASK (GET_MODE (varop))
5903 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
5904#endif
5905 ))
5906 {
5907 varop = SUBREG_REG (varop);
5908 continue;
5909 }
5910 break;
5911
5912 case ZERO_EXTRACT:
5913 case SIGN_EXTRACT:
5914 case ZERO_EXTEND:
5915 case SIGN_EXTEND:
5916 /* Try to expand these into a series of shifts and then work
5917 with that result. If we can't, for example, if the extract
5918 isn't at a fixed position, give up. */
5919 temp = expand_compound_operation (varop);
5920 if (temp != varop)
5921 {
5922 varop = temp;
5923 continue;
5924 }
5925 break;
5926
5927 case AND:
5928 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
5929 {
5930 constop &= INTVAL (XEXP (varop, 1));
5931 varop = XEXP (varop, 0);
5932 continue;
5933 }
5934 break;
5935
5936 case IOR:
5937 case XOR:
5938 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5939 LSHIFT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5940 operation which may be a bitfield extraction. */
5941
5942 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5943 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5944 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5f4f0e22 5945 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
230d793d
RS
5946 && GET_CODE (XEXP (varop, 1)) == CONST_INT
5947 && (INTVAL (XEXP (varop, 1))
951553af 5948 & ~ nonzero_bits (XEXP (varop, 0), GET_MODE (varop)) == 0))
230d793d 5949 {
5f4f0e22
CH
5950 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
5951 << INTVAL (XEXP (XEXP (varop, 0), 1)));
230d793d
RS
5952 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
5953 XEXP (XEXP (varop, 0), 0), temp);
5954 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5955 temp, XEXP (varop, 1));
5956 continue;
5957 }
5958
5959 /* Apply the AND to both branches of the IOR or XOR, then try to
5960 apply the distributive law. This may eliminate operations
5961 if either branch can be simplified because of the AND.
5962 It may also make some cases more complex, but those cases
5963 probably won't match a pattern either with or without this. */
5964 return
5965 gen_lowpart_for_combine
5966 (mode, apply_distributive_law
5967 (gen_rtx_combine
5968 (GET_CODE (varop), GET_MODE (varop),
5f4f0e22 5969 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
230d793d 5970 XEXP (varop, 0), constop),
5f4f0e22 5971 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
230d793d
RS
5972 XEXP (varop, 1), constop))));
5973
5974 case NOT:
5975 /* (and (not FOO)) is (and (xor FOO CONST_OP)) so if FOO is an
5976 LSHIFTRT we can do the same as above. */
5977
5978 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5979 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5980 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5f4f0e22 5981 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d 5982 {
5f4f0e22 5983 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
230d793d
RS
5984 temp = gen_binary (XOR, GET_MODE (varop),
5985 XEXP (XEXP (varop, 0), 0), temp);
5986 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5987 temp, XEXP (XEXP (varop, 0), 1));
5988 continue;
5989 }
5990 break;
5991
5992 case ASHIFTRT:
5993 /* If we are just looking for the sign bit, we don't need this
5994 shift at all, even if it has a variable count. */
5f4f0e22
CH
5995 if (constop == ((HOST_WIDE_INT) 1
5996 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
230d793d
RS
5997 {
5998 varop = XEXP (varop, 0);
5999 continue;
6000 }
6001
6002 /* If this is a shift by a constant, get a mask that contains
6003 those bits that are not copies of the sign bit. We then have
6004 two cases: If CONSTOP only includes those bits, this can be
6005 a logical shift, which may allow simplifications. If CONSTOP
6006 is a single-bit field not within those bits, we are requesting
6007 a copy of the sign bit and hence can shift the sign bit to
6008 the appropriate location. */
6009 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6010 && INTVAL (XEXP (varop, 1)) >= 0
5f4f0e22 6011 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
6012 {
6013 int i = -1;
6014
951553af
RK
6015 nonzero = GET_MODE_MASK (GET_MODE (varop));
6016 nonzero >>= INTVAL (XEXP (varop, 1));
230d793d 6017
951553af 6018 if ((constop & ~ nonzero) == 0
230d793d
RS
6019 || (i = exact_log2 (constop)) >= 0)
6020 {
6021 varop = simplify_shift_const
6022 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6023 i < 0 ? INTVAL (XEXP (varop, 1))
6024 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
6025 if (GET_CODE (varop) != ASHIFTRT)
6026 continue;
6027 }
6028 }
6029
6030 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
6031 even if the shift count isn't a constant. */
6032 if (constop == 1)
6033 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
6034 XEXP (varop, 0), XEXP (varop, 1));
6035 break;
6036
500c518b
RK
6037 case LSHIFTRT:
6038 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6039 shift and AND produces only copies of the sign bit (C2 is one less
b8a68db6 6040 than a power of two), we can do this with just a shift. */
500c518b
RK
6041
6042 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6043 && ((INTVAL (XEXP (varop, 1))
6044 + num_sign_bit_copies (XEXP (varop, 0),
6045 GET_MODE (XEXP (varop, 0))))
6046 >= GET_MODE_BITSIZE (GET_MODE (varop)))
6047 && exact_log2 (constop + 1) >= 0)
6048 varop
6049 = gen_rtx_combine (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6050 GEN_INT (GET_MODE_BITSIZE (GET_MODE (varop))
6051 - exact_log2 (constop + 1)));
6052 break;
6053
230d793d
RS
6054 case NE:
6055 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
951553af
RK
6056 included in STORE_FLAG_VALUE and FOO has no bits that might be
6057 nonzero not in CONST. */
230d793d
RS
6058 if ((constop & ~ STORE_FLAG_VALUE) == 0
6059 && XEXP (varop, 0) == const0_rtx
951553af 6060 && (nonzero_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
230d793d
RS
6061 {
6062 varop = XEXP (varop, 0);
6063 continue;
6064 }
6065 break;
6066
6067 case PLUS:
6068 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6069 low-order bits (as in an alignment operation) and FOO is already
6070 aligned to that boundary, we can convert remove this AND
6071 and possibly the PLUS if it is now adding zero. */
6072 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6073 && exact_log2 (-constop) >= 0
951553af 6074 && (nonzero_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
230d793d
RS
6075 {
6076 varop = plus_constant (XEXP (varop, 0),
6077 INTVAL (XEXP (varop, 1)) & constop);
6078 constop = ~0;
6079 break;
6080 }
6081
6082 /* ... fall through ... */
6083
6084 case MINUS:
6085 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
6086 less than powers of two and M2 is narrower than M1, we can
6087 eliminate the inner AND. This occurs when incrementing
6088 bit fields. */
6089
6090 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
6091 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
6092 SUBST (XEXP (varop, 0),
6093 expand_compound_operation (XEXP (varop, 0)));
6094
6095 if (GET_CODE (XEXP (varop, 0)) == AND
6096 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
6097 && exact_log2 (constop + 1) >= 0
6098 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
6099 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
6100 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
6101 break;
6102 }
6103
6104 break;
6105 }
6106
6107 /* If we have reached a constant, this whole thing is constant. */
6108 if (GET_CODE (varop) == CONST_INT)
5f4f0e22 6109 return GEN_INT (constop & INTVAL (varop));
230d793d 6110
fc06d7aa
RK
6111 /* See what bits may be nonzero in VAROP. Unlike the general case of
6112 a call to nonzero_bits, here we don't care about bits outside
6113 MODE. */
6114
6115 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
230d793d
RS
6116
6117 /* Turn off all bits in the constant that are known to already be zero.
951553af 6118 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
230d793d
RS
6119 which is tested below. */
6120
951553af 6121 constop &= nonzero;
230d793d
RS
6122
6123 /* If we don't have any bits left, return zero. */
6124 if (constop == 0)
6125 return const0_rtx;
6126
6127 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6128 if we already had one (just check for the simplest cases). */
6129 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6130 && GET_MODE (XEXP (x, 0)) == mode
6131 && SUBREG_REG (XEXP (x, 0)) == varop)
6132 varop = XEXP (x, 0);
6133 else
6134 varop = gen_lowpart_for_combine (mode, varop);
6135
6136 /* If we can't make the SUBREG, try to return what we were given. */
6137 if (GET_CODE (varop) == CLOBBER)
6138 return x ? x : varop;
6139
6140 /* If we are only masking insignificant bits, return VAROP. */
951553af 6141 if (constop == nonzero)
230d793d
RS
6142 x = varop;
6143
6144 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6145 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
5f4f0e22 6146 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
230d793d
RS
6147
6148 else
6149 {
6150 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6151 || INTVAL (XEXP (x, 1)) != constop)
5f4f0e22 6152 SUBST (XEXP (x, 1), GEN_INT (constop));
230d793d
RS
6153
6154 SUBST (XEXP (x, 0), varop);
6155 }
6156
6157 return x;
6158}
6159\f
6160/* Given an expression, X, compute which bits in X can be non-zero.
6161 We don't care about bits outside of those defined in MODE.
6162
6163 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
6164 a shift, AND, or zero_extract, we can do better. */
6165
5f4f0e22 6166static unsigned HOST_WIDE_INT
951553af 6167nonzero_bits (x, mode)
230d793d
RS
6168 rtx x;
6169 enum machine_mode mode;
6170{
951553af
RK
6171 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6172 unsigned HOST_WIDE_INT inner_nz;
230d793d
RS
6173 enum rtx_code code;
6174 int mode_width = GET_MODE_BITSIZE (mode);
6175 rtx tem;
6176
6177 /* If X is wider than MODE, use its mode instead. */
6178 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6179 {
6180 mode = GET_MODE (x);
951553af 6181 nonzero = GET_MODE_MASK (mode);
230d793d
RS
6182 mode_width = GET_MODE_BITSIZE (mode);
6183 }
6184
5f4f0e22 6185 if (mode_width > HOST_BITS_PER_WIDE_INT)
230d793d
RS
6186 /* Our only callers in this case look for single bit values. So
6187 just return the mode mask. Those tests will then be false. */
951553af 6188 return nonzero;
230d793d
RS
6189
6190 code = GET_CODE (x);
6191 switch (code)
6192 {
6193 case REG:
6194#ifdef STACK_BOUNDARY
6195 /* If this is the stack pointer, we may know something about its
6196 alignment. If PUSH_ROUNDING is defined, it is possible for the
6197 stack to be momentarily aligned only to that amount, so we pick
6198 the least alignment. */
6199
6200 if (x == stack_pointer_rtx)
6201 {
6202 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6203
6204#ifdef PUSH_ROUNDING
6205 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6206#endif
6207
951553af 6208 return nonzero & ~ (sp_alignment - 1);
230d793d
RS
6209 }
6210#endif
6211
55310dad
RK
6212 /* If X is a register whose nonzero bits value is current, use it.
6213 Otherwise, if X is a register whose value we can find, use that
6214 value. Otherwise, use the previously-computed global nonzero bits
6215 for this register. */
6216
6217 if (reg_last_set_value[REGNO (x)] != 0
6218 && reg_last_set_mode[REGNO (x)] == mode
6219 && (reg_n_sets[REGNO (x)] == 1
6220 || reg_last_set_label[REGNO (x)] == label_tick)
6221 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6222 return reg_last_set_nonzero_bits[REGNO (x)];
230d793d
RS
6223
6224 tem = get_last_value (x);
6225 if (tem)
951553af
RK
6226 return nonzero_bits (tem, mode);
6227 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6228 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 6229 else
951553af 6230 return nonzero;
230d793d
RS
6231
6232 case CONST_INT:
6233 return INTVAL (x);
6234
6235#ifdef BYTE_LOADS_ZERO_EXTEND
6236 case MEM:
6237 /* In many, if not most, RISC machines, reading a byte from memory
6238 zeros the rest of the register. Noticing that fact saves a lot
6239 of extra zero-extends. */
951553af 6240 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d
RS
6241 break;
6242#endif
6243
6244#if STORE_FLAG_VALUE == 1
6245 case EQ: case NE:
6246 case GT: case GTU:
6247 case LT: case LTU:
6248 case GE: case GEU:
6249 case LE: case LEU:
3f508eca
RK
6250
6251 if (GET_MODE_CLASS (mode) == MODE_INT)
951553af 6252 nonzero = 1;
230d793d
RS
6253
6254 /* A comparison operation only sets the bits given by its mode. The
6255 rest are set undefined. */
6256 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
951553af 6257 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
230d793d
RS
6258 break;
6259#endif
6260
230d793d 6261 case NEG:
d0ab8cd3
RK
6262 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6263 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 6264 nonzero = 1;
230d793d
RS
6265
6266 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
951553af 6267 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
230d793d 6268 break;
d0ab8cd3
RK
6269
6270 case ABS:
6271 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6272 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 6273 nonzero = 1;
d0ab8cd3 6274 break;
230d793d
RS
6275
6276 case TRUNCATE:
951553af 6277 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
230d793d
RS
6278 break;
6279
6280 case ZERO_EXTEND:
951553af 6281 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 6282 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 6283 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
230d793d
RS
6284 break;
6285
6286 case SIGN_EXTEND:
6287 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6288 Otherwise, show all the bits in the outer mode but not the inner
6289 may be non-zero. */
951553af 6290 inner_nz = nonzero_bits (XEXP (x, 0), mode);
230d793d
RS
6291 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6292 {
951553af
RK
6293 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6294 if (inner_nz &
5f4f0e22
CH
6295 (((HOST_WIDE_INT) 1
6296 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 6297 inner_nz |= (GET_MODE_MASK (mode)
230d793d
RS
6298 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6299 }
6300
951553af 6301 nonzero &= inner_nz;
230d793d
RS
6302 break;
6303
6304 case AND:
951553af
RK
6305 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6306 & nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
6307 break;
6308
d0ab8cd3
RK
6309 case XOR: case IOR:
6310 case UMIN: case UMAX: case SMIN: case SMAX:
951553af
RK
6311 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6312 | nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
6313 break;
6314
6315 case PLUS: case MINUS:
6316 case MULT:
6317 case DIV: case UDIV:
6318 case MOD: case UMOD:
6319 /* We can apply the rules of arithmetic to compute the number of
6320 high- and low-order zero bits of these operations. We start by
6321 computing the width (position of the highest-order non-zero bit)
6322 and the number of low-order zero bits for each value. */
6323 {
951553af
RK
6324 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6325 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6326 int width0 = floor_log2 (nz0) + 1;
6327 int width1 = floor_log2 (nz1) + 1;
6328 int low0 = floor_log2 (nz0 & -nz0);
6329 int low1 = floor_log2 (nz1 & -nz1);
6330 int op0_maybe_minusp = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6331 int op1_maybe_minusp = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
230d793d
RS
6332 int result_width = mode_width;
6333 int result_low = 0;
6334
6335 switch (code)
6336 {
6337 case PLUS:
6338 result_width = MAX (width0, width1) + 1;
6339 result_low = MIN (low0, low1);
6340 break;
6341 case MINUS:
6342 result_low = MIN (low0, low1);
6343 break;
6344 case MULT:
6345 result_width = width0 + width1;
6346 result_low = low0 + low1;
6347 break;
6348 case DIV:
6349 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6350 result_width = width0;
6351 break;
6352 case UDIV:
6353 result_width = width0;
6354 break;
6355 case MOD:
6356 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6357 result_width = MIN (width0, width1);
6358 result_low = MIN (low0, low1);
6359 break;
6360 case UMOD:
6361 result_width = MIN (width0, width1);
6362 result_low = MIN (low0, low1);
6363 break;
6364 }
6365
6366 if (result_width < mode_width)
951553af 6367 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
230d793d
RS
6368
6369 if (result_low > 0)
951553af 6370 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
230d793d
RS
6371 }
6372 break;
6373
6374 case ZERO_EXTRACT:
6375 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 6376 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 6377 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
230d793d
RS
6378 break;
6379
6380 case SUBREG:
c3c2cb37
RK
6381 /* If this is a SUBREG formed for a promoted variable that has
6382 been zero-extended, we know that at least the high-order bits
6383 are zero, though others might be too. */
6384
6385 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
951553af
RK
6386 nonzero = (GET_MODE_MASK (GET_MODE (x))
6387 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 6388
230d793d
RS
6389 /* If the inner mode is a single word for both the host and target
6390 machines, we can compute this from which bits of the inner
951553af 6391 object might be nonzero. */
230d793d 6392 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5f4f0e22
CH
6393 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6394 <= HOST_BITS_PER_WIDE_INT))
230d793d 6395 {
951553af 6396 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
c6dc70d6 6397#ifndef BYTE_LOADS_EXTEND
230d793d
RS
6398 /* On many CISC machines, accessing an object in a wider mode
6399 causes the high-order bits to become undefined. So they are
6400 not known to be zero. */
6401 if (GET_MODE_SIZE (GET_MODE (x))
6402 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
951553af
RK
6403 nonzero |= (GET_MODE_MASK (GET_MODE (x))
6404 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
230d793d
RS
6405#endif
6406 }
6407 break;
6408
6409 case ASHIFTRT:
6410 case LSHIFTRT:
6411 case ASHIFT:
6412 case LSHIFT:
6413 case ROTATE:
951553af 6414 /* The nonzero bits are in two classes: any bits within MODE
230d793d 6415 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 6416 nonzero bits are those that are significant in the operand of
230d793d
RS
6417 the shift when shifted the appropriate number of bits. This
6418 shows that high-order bits are cleared by the right shift and
6419 low-order bits by left shifts. */
6420 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6421 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 6422 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
6423 {
6424 enum machine_mode inner_mode = GET_MODE (x);
6425 int width = GET_MODE_BITSIZE (inner_mode);
6426 int count = INTVAL (XEXP (x, 1));
5f4f0e22 6427 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
951553af
RK
6428 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
6429 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 6430 unsigned HOST_WIDE_INT outer = 0;
230d793d
RS
6431
6432 if (mode_width > width)
951553af 6433 outer = (op_nonzero & nonzero & ~ mode_mask);
230d793d
RS
6434
6435 if (code == LSHIFTRT)
6436 inner >>= count;
6437 else if (code == ASHIFTRT)
6438 {
6439 inner >>= count;
6440
951553af 6441 /* If the sign bit may have been nonzero before the shift, we
230d793d 6442 need to mark all the places it could have been copied to
951553af 6443 by the shift as possibly nonzero. */
5f4f0e22
CH
6444 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6445 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d
RS
6446 }
6447 else if (code == LSHIFT || code == ASHIFT)
6448 inner <<= count;
6449 else
6450 inner = ((inner << (count % width)
6451 | (inner >> (width - (count % width)))) & mode_mask);
6452
951553af 6453 nonzero &= (outer | inner);
230d793d
RS
6454 }
6455 break;
6456
6457 case FFS:
6458 /* This is at most the number of bits in the mode. */
951553af 6459 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 6460 break;
d0ab8cd3
RK
6461
6462 case IF_THEN_ELSE:
951553af
RK
6463 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
6464 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 6465 break;
230d793d
RS
6466 }
6467
951553af 6468 return nonzero;
230d793d
RS
6469}
6470\f
d0ab8cd3
RK
6471/* Return the number of bits at the high-order end of X that are known to
6472 be equal to the sign bit. This number will always be between 1 and
6473 the number of bits in the mode of X. MODE is the mode to be used
6474 if X is VOIDmode. */
6475
6476static int
6477num_sign_bit_copies (x, mode)
6478 rtx x;
6479 enum machine_mode mode;
6480{
6481 enum rtx_code code = GET_CODE (x);
6482 int bitwidth;
6483 int num0, num1, result;
951553af 6484 unsigned HOST_WIDE_INT nonzero;
d0ab8cd3
RK
6485 rtx tem;
6486
6487 /* If we weren't given a mode, use the mode of X. If the mode is still
6488 VOIDmode, we don't know anything. */
6489
6490 if (mode == VOIDmode)
6491 mode = GET_MODE (x);
6492
6493 if (mode == VOIDmode)
6752e8d2 6494 return 1;
d0ab8cd3
RK
6495
6496 bitwidth = GET_MODE_BITSIZE (mode);
6497
6498 switch (code)
6499 {
6500 case REG:
55310dad
RK
6501
6502 if (reg_last_set_value[REGNO (x)] != 0
6503 && reg_last_set_mode[REGNO (x)] == mode
6504 && (reg_n_sets[REGNO (x)] == 1
6505 || reg_last_set_label[REGNO (x)] == label_tick)
6506 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6507 return reg_last_set_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
6508
6509 tem = get_last_value (x);
6510 if (tem != 0)
6511 return num_sign_bit_copies (tem, mode);
55310dad
RK
6512
6513 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6514 return reg_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
6515 break;
6516
457816e2
RK
6517#ifdef BYTE_LOADS_SIGN_EXTEND
6518 case MEM:
6519 /* Some RISC machines sign-extend all loads of smaller than a word. */
6520 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
6521#endif
6522
d0ab8cd3
RK
6523 case CONST_INT:
6524 /* If the constant is negative, take its 1's complement and remask.
6525 Then see how many zero bits we have. */
951553af 6526 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 6527 if (bitwidth <= HOST_BITS_PER_WIDE_INT
951553af
RK
6528 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6529 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 6530
951553af 6531 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
6532
6533 case SUBREG:
c3c2cb37
RK
6534 /* If this is a SUBREG for a promoted object that is sign-extended
6535 and we are looking at it in a wider mode, we know that at least the
6536 high-order bits are known to be sign bit copies. */
6537
6538 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
dc3e17ad
RK
6539 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
6540 num_sign_bit_copies (SUBREG_REG (x), mode));
c3c2cb37 6541
d0ab8cd3
RK
6542 /* For a smaller object, just ignore the high bits. */
6543 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
6544 {
6545 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
6546 return MAX (1, (num0
6547 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6548 - bitwidth)));
6549 }
457816e2 6550
c6dc70d6 6551#ifdef BYTE_LOADS_EXTEND
457816e2
RK
6552 /* For paradoxical SUBREGs, just look inside since, on machines with
6553 one of these defined, we assume that operations are actually
6554 performed on the full register. Note that we are passing MODE
6555 to the recursive call, so the number of sign bit copies will
6556 remain relative to that mode, not the inner mode. */
6557
6558 if (GET_MODE_SIZE (GET_MODE (x))
6559 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6560 return num_sign_bit_copies (SUBREG_REG (x), mode);
6561#endif
6562
d0ab8cd3
RK
6563 break;
6564
6565 case SIGN_EXTRACT:
6566 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6567 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
6568 break;
6569
6570 case SIGN_EXTEND:
6571 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6572 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
6573
6574 case TRUNCATE:
6575 /* For a smaller object, just ignore the high bits. */
6576 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
6577 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6578 - bitwidth)));
6579
6580 case NOT:
6581 return num_sign_bit_copies (XEXP (x, 0), mode);
6582
6583 case ROTATE: case ROTATERT:
6584 /* If we are rotating left by a number of bits less than the number
6585 of sign bit copies, we can just subtract that amount from the
6586 number. */
6587 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6588 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
6589 {
6590 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6591 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
6592 : bitwidth - INTVAL (XEXP (x, 1))));
6593 }
6594 break;
6595
6596 case NEG:
6597 /* In general, this subtracts one sign bit copy. But if the value
6598 is known to be positive, the number of sign bit copies is the
951553af
RK
6599 same as that of the input. Finally, if the input has just one bit
6600 that might be nonzero, all the bits are copies of the sign bit. */
6601 nonzero = nonzero_bits (XEXP (x, 0), mode);
6602 if (nonzero == 1)
d0ab8cd3
RK
6603 return bitwidth;
6604
6605 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6606 if (num0 > 1
ac49a949 6607 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 6608 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d0ab8cd3
RK
6609 num0--;
6610
6611 return num0;
6612
6613 case IOR: case AND: case XOR:
6614 case SMIN: case SMAX: case UMIN: case UMAX:
6615 /* Logical operations will preserve the number of sign-bit copies.
6616 MIN and MAX operations always return one of the operands. */
6617 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6618 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6619 return MIN (num0, num1);
6620
6621 case PLUS: case MINUS:
6622 /* For addition and subtraction, we can have a 1-bit carry. However,
6623 if we are subtracting 1 from a positive number, there will not
6624 be such a carry. Furthermore, if the positive number is known to
6625 be 0 or 1, we know the result is either -1 or 0. */
6626
3e3ea975 6627 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 6628 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 6629 {
951553af
RK
6630 nonzero = nonzero_bits (XEXP (x, 0), mode);
6631 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
6632 return (nonzero == 1 || nonzero == 0 ? bitwidth
6633 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
6634 }
6635
6636 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6637 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6638 return MAX (1, MIN (num0, num1) - 1);
6639
6640 case MULT:
6641 /* The number of bits of the product is the sum of the number of
6642 bits of both terms. However, unless one of the terms if known
6643 to be positive, we must allow for an additional bit since negating
6644 a negative number can remove one sign bit copy. */
6645
6646 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6647 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6648
6649 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
6650 if (result > 0
9295e6af 6651 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 6652 && ((nonzero_bits (XEXP (x, 0), mode)
d0ab8cd3 6653 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
951553af 6654 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
6655 & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) != 0))
6656 result--;
6657
6658 return MAX (1, result);
6659
6660 case UDIV:
6661 /* The result must be <= the first operand. */
6662 return num_sign_bit_copies (XEXP (x, 0), mode);
6663
6664 case UMOD:
6665 /* The result must be <= the scond operand. */
6666 return num_sign_bit_copies (XEXP (x, 1), mode);
6667
6668 case DIV:
6669 /* Similar to unsigned division, except that we have to worry about
6670 the case where the divisor is negative, in which case we have
6671 to add 1. */
6672 result = num_sign_bit_copies (XEXP (x, 0), mode);
6673 if (result > 1
ac49a949 6674 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 6675 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
6676 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6677 result --;
6678
6679 return result;
6680
6681 case MOD:
6682 result = num_sign_bit_copies (XEXP (x, 1), mode);
6683 if (result > 1
ac49a949 6684 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 6685 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
6686 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6687 result --;
6688
6689 return result;
6690
6691 case ASHIFTRT:
6692 /* Shifts by a constant add to the number of bits equal to the
6693 sign bit. */
6694 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6695 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6696 && INTVAL (XEXP (x, 1)) > 0)
6697 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
6698
6699 return num0;
6700
6701 case ASHIFT:
6702 case LSHIFT:
6703 /* Left shifts destroy copies. */
6704 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6705 || INTVAL (XEXP (x, 1)) < 0
6706 || INTVAL (XEXP (x, 1)) >= bitwidth)
6707 return 1;
6708
6709 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6710 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
6711
6712 case IF_THEN_ELSE:
6713 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
6714 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
6715 return MIN (num0, num1);
6716
6717#if STORE_FLAG_VALUE == -1
6718 case EQ: case NE: case GE: case GT: case LE: case LT:
6719 case GEU: case GTU: case LEU: case LTU:
6720 return bitwidth;
6721#endif
6722 }
6723
6724 /* If we haven't been able to figure it out by one of the above rules,
6725 see if some of the high-order bits are known to be zero. If so,
ac49a949
RS
6726 count those bits and return one less than that amount. If we can't
6727 safely compute the mask for this mode, always return BITWIDTH. */
6728
6729 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 6730 return 1;
d0ab8cd3 6731
951553af
RK
6732 nonzero = nonzero_bits (x, mode);
6733 return (nonzero == GET_MODE_MASK (mode)
6734 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
6735}
6736\f
1a26b032
RK
6737/* Return the number of "extended" bits there are in X, when interpreted
6738 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
6739 unsigned quantities, this is the number of high-order zero bits.
6740 For signed quantities, this is the number of copies of the sign bit
6741 minus 1. In both case, this function returns the number of "spare"
6742 bits. For example, if two quantities for which this function returns
6743 at least 1 are added, the addition is known not to overflow.
6744
6745 This function will always return 0 unless called during combine, which
6746 implies that it must be called from a define_split. */
6747
6748int
6749extended_count (x, mode, unsignedp)
6750 rtx x;
6751 enum machine_mode mode;
6752 int unsignedp;
6753{
951553af 6754 if (nonzero_sign_valid == 0)
1a26b032
RK
6755 return 0;
6756
6757 return (unsignedp
ac49a949
RS
6758 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6759 && (GET_MODE_BITSIZE (mode) - 1
951553af 6760 - floor_log2 (nonzero_bits (x, mode))))
1a26b032
RK
6761 : num_sign_bit_copies (x, mode) - 1);
6762}
6763\f
230d793d
RS
6764/* This function is called from `simplify_shift_const' to merge two
6765 outer operations. Specifically, we have already found that we need
6766 to perform operation *POP0 with constant *PCONST0 at the outermost
6767 position. We would now like to also perform OP1 with constant CONST1
6768 (with *POP0 being done last).
6769
6770 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
6771 the resulting operation. *PCOMP_P is set to 1 if we would need to
6772 complement the innermost operand, otherwise it is unchanged.
6773
6774 MODE is the mode in which the operation will be done. No bits outside
6775 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 6776 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
6777
6778 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
6779 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
6780 result is simply *PCONST0.
6781
6782 If the resulting operation cannot be expressed as one operation, we
6783 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
6784
6785static int
6786merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
6787 enum rtx_code *pop0;
5f4f0e22 6788 HOST_WIDE_INT *pconst0;
230d793d 6789 enum rtx_code op1;
5f4f0e22 6790 HOST_WIDE_INT const1;
230d793d
RS
6791 enum machine_mode mode;
6792 int *pcomp_p;
6793{
6794 enum rtx_code op0 = *pop0;
5f4f0e22 6795 HOST_WIDE_INT const0 = *pconst0;
230d793d
RS
6796
6797 const0 &= GET_MODE_MASK (mode);
6798 const1 &= GET_MODE_MASK (mode);
6799
6800 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6801 if (op0 == AND)
6802 const1 &= const0;
6803
6804 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6805 if OP0 is SET. */
6806
6807 if (op1 == NIL || op0 == SET)
6808 return 1;
6809
6810 else if (op0 == NIL)
6811 op0 = op1, const0 = const1;
6812
6813 else if (op0 == op1)
6814 {
6815 switch (op0)
6816 {
6817 case AND:
6818 const0 &= const1;
6819 break;
6820 case IOR:
6821 const0 |= const1;
6822 break;
6823 case XOR:
6824 const0 ^= const1;
6825 break;
6826 case PLUS:
6827 const0 += const1;
6828 break;
6829 case NEG:
6830 op0 = NIL;
6831 break;
6832 }
6833 }
6834
6835 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6836 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6837 return 0;
6838
6839 /* If the two constants aren't the same, we can't do anything. The
6840 remaining six cases can all be done. */
6841 else if (const0 != const1)
6842 return 0;
6843
6844 else
6845 switch (op0)
6846 {
6847 case IOR:
6848 if (op1 == AND)
6849 /* (a & b) | b == b */
6850 op0 = SET;
6851 else /* op1 == XOR */
6852 /* (a ^ b) | b == a | b */
6853 ;
6854 break;
6855
6856 case XOR:
6857 if (op1 == AND)
6858 /* (a & b) ^ b == (~a) & b */
6859 op0 = AND, *pcomp_p = 1;
6860 else /* op1 == IOR */
6861 /* (a | b) ^ b == a & ~b */
6862 op0 = AND, *pconst0 = ~ const0;
6863 break;
6864
6865 case AND:
6866 if (op1 == IOR)
6867 /* (a | b) & b == b */
6868 op0 = SET;
6869 else /* op1 == XOR */
6870 /* (a ^ b) & b) == (~a) & b */
6871 *pcomp_p = 1;
6872 break;
6873 }
6874
6875 /* Check for NO-OP cases. */
6876 const0 &= GET_MODE_MASK (mode);
6877 if (const0 == 0
6878 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6879 op0 = NIL;
6880 else if (const0 == 0 && op0 == AND)
6881 op0 = SET;
6882 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6883 op0 = NIL;
6884
6885 *pop0 = op0;
6886 *pconst0 = const0;
6887
6888 return 1;
6889}
6890\f
6891/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6892 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6893 that we started with.
6894
6895 The shift is normally computed in the widest mode we find in VAROP, as
6896 long as it isn't a different number of words than RESULT_MODE. Exceptions
6897 are ASHIFTRT and ROTATE, which are always done in their original mode, */
6898
6899static rtx
6900simplify_shift_const (x, code, result_mode, varop, count)
6901 rtx x;
6902 enum rtx_code code;
6903 enum machine_mode result_mode;
6904 rtx varop;
6905 int count;
6906{
6907 enum rtx_code orig_code = code;
6908 int orig_count = count;
6909 enum machine_mode mode = result_mode;
6910 enum machine_mode shift_mode, tmode;
6911 int mode_words
6912 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6913 /* We form (outer_op (code varop count) (outer_const)). */
6914 enum rtx_code outer_op = NIL;
5f4f0e22 6915 HOST_WIDE_INT outer_const;
230d793d
RS
6916 rtx const_rtx;
6917 int complement_p = 0;
6918 rtx new;
6919
6920 /* If we were given an invalid count, don't do anything except exactly
6921 what was requested. */
6922
6923 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6924 {
6925 if (x)
6926 return x;
6927
5f4f0e22 6928 return gen_rtx (code, mode, varop, GEN_INT (count));
230d793d
RS
6929 }
6930
6931 /* Unless one of the branches of the `if' in this loop does a `continue',
6932 we will `break' the loop after the `if'. */
6933
6934 while (count != 0)
6935 {
6936 /* If we have an operand of (clobber (const_int 0)), just return that
6937 value. */
6938 if (GET_CODE (varop) == CLOBBER)
6939 return varop;
6940
6941 /* If we discovered we had to complement VAROP, leave. Making a NOT
6942 here would cause an infinite loop. */
6943 if (complement_p)
6944 break;
6945
6946 /* Convert ROTATETRT to ROTATE. */
6947 if (code == ROTATERT)
6948 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
6949
6950 /* Canonicalize LSHIFT to ASHIFT. */
6951 if (code == LSHIFT)
6952 code = ASHIFT;
6953
6954 /* We need to determine what mode we will do the shift in. If the
6955 shift is a ASHIFTRT or ROTATE, we must always do it in the mode it
6956 was originally done in. Otherwise, we can do it in MODE, the widest
6957 mode encountered. */
6958 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6959
6960 /* Handle cases where the count is greater than the size of the mode
6961 minus 1. For ASHIFT, use the size minus one as the count (this can
6962 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6963 take the count modulo the size. For other shifts, the result is
6964 zero.
6965
6966 Since these shifts are being produced by the compiler by combining
6967 multiple operations, each of which are defined, we know what the
6968 result is supposed to be. */
6969
6970 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
6971 {
6972 if (code == ASHIFTRT)
6973 count = GET_MODE_BITSIZE (shift_mode) - 1;
6974 else if (code == ROTATE || code == ROTATERT)
6975 count %= GET_MODE_BITSIZE (shift_mode);
6976 else
6977 {
6978 /* We can't simply return zero because there may be an
6979 outer op. */
6980 varop = const0_rtx;
6981 count = 0;
6982 break;
6983 }
6984 }
6985
6986 /* Negative counts are invalid and should not have been made (a
6987 programmer-specified negative count should have been handled
6988 above). */
6989 else if (count < 0)
6990 abort ();
6991
d0ab8cd3
RK
6992 /* An arithmetic right shift of a quantity known to be -1 or 0
6993 is a no-op. */
6994 if (code == ASHIFTRT
6995 && (num_sign_bit_copies (varop, shift_mode)
6996 == GET_MODE_BITSIZE (shift_mode)))
6997 {
6998 count = 0;
6999 break;
7000 }
7001
500c518b
RK
7002 /* If we are doing an arithmetic right shift and discarding all but
7003 the sign bit copies, this is equivalent to doing a shift by the
7004 bitsize minus one. Convert it into that shift because it will often
7005 allow other simplifications. */
7006
7007 if (code == ASHIFTRT
7008 && (count + num_sign_bit_copies (varop, shift_mode)
7009 >= GET_MODE_BITSIZE (shift_mode)))
7010 count = GET_MODE_BITSIZE (shift_mode) - 1;
7011
230d793d
RS
7012 /* We simplify the tests below and elsewhere by converting
7013 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7014 `make_compound_operation' will convert it to a ASHIFTRT for
7015 those machines (such as Vax) that don't have a LSHIFTRT. */
5f4f0e22 7016 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 7017 && code == ASHIFTRT
951553af 7018 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
7019 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7020 == 0))
230d793d
RS
7021 code = LSHIFTRT;
7022
7023 switch (GET_CODE (varop))
7024 {
7025 case SIGN_EXTEND:
7026 case ZERO_EXTEND:
7027 case SIGN_EXTRACT:
7028 case ZERO_EXTRACT:
7029 new = expand_compound_operation (varop);
7030 if (new != varop)
7031 {
7032 varop = new;
7033 continue;
7034 }
7035 break;
7036
7037 case MEM:
7038 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7039 minus the width of a smaller mode, we can do this with a
7040 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7041 if ((code == ASHIFTRT || code == LSHIFTRT)
7042 && ! mode_dependent_address_p (XEXP (varop, 0))
7043 && ! MEM_VOLATILE_P (varop)
7044 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7045 MODE_INT, 1)) != BLKmode)
7046 {
7047#if BYTES_BIG_ENDIAN
7048 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7049#else
7050 new = gen_rtx (MEM, tmode,
7051 plus_constant (XEXP (varop, 0),
7052 count / BITS_PER_UNIT));
7053 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7054 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7055 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7056#endif
7057 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7058 : ZERO_EXTEND, mode, new);
7059 count = 0;
7060 continue;
7061 }
7062 break;
7063
7064 case USE:
7065 /* Similar to the case above, except that we can only do this if
7066 the resulting mode is the same as that of the underlying
7067 MEM and adjust the address depending on the *bits* endianness
7068 because of the way that bit-field extract insns are defined. */
7069 if ((code == ASHIFTRT || code == LSHIFTRT)
7070 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7071 MODE_INT, 1)) != BLKmode
7072 && tmode == GET_MODE (XEXP (varop, 0)))
7073 {
7074#if BITS_BIG_ENDIAN
7075 new = XEXP (varop, 0);
7076#else
7077 new = copy_rtx (XEXP (varop, 0));
7078 SUBST (XEXP (new, 0),
7079 plus_constant (XEXP (new, 0),
7080 count / BITS_PER_UNIT));
7081#endif
7082
7083 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7084 : ZERO_EXTEND, mode, new);
7085 count = 0;
7086 continue;
7087 }
7088 break;
7089
7090 case SUBREG:
7091 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7092 the same number of words as what we've seen so far. Then store
7093 the widest mode in MODE. */
f9e67232
RS
7094 if (subreg_lowpart_p (varop)
7095 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7096 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
7097 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7098 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7099 == mode_words))
7100 {
7101 varop = SUBREG_REG (varop);
7102 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7103 mode = GET_MODE (varop);
7104 continue;
7105 }
7106 break;
7107
7108 case MULT:
7109 /* Some machines use MULT instead of ASHIFT because MULT
7110 is cheaper. But it is still better on those machines to
7111 merge two shifts into one. */
7112 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7113 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7114 {
7115 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 7116 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));;
230d793d
RS
7117 continue;
7118 }
7119 break;
7120
7121 case UDIV:
7122 /* Similar, for when divides are cheaper. */
7123 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7124 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7125 {
7126 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 7127 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
7128 continue;
7129 }
7130 break;
7131
7132 case ASHIFTRT:
7133 /* If we are extracting just the sign bit of an arithmetic right
7134 shift, that shift is not needed. */
7135 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7136 {
7137 varop = XEXP (varop, 0);
7138 continue;
7139 }
7140
7141 /* ... fall through ... */
7142
7143 case LSHIFTRT:
7144 case ASHIFT:
7145 case LSHIFT:
7146 case ROTATE:
7147 /* Here we have two nested shifts. The result is usually the
7148 AND of a new shift with a mask. We compute the result below. */
7149 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7150 && INTVAL (XEXP (varop, 1)) >= 0
7151 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
7152 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7153 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
7154 {
7155 enum rtx_code first_code = GET_CODE (varop);
7156 int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 7157 unsigned HOST_WIDE_INT mask;
230d793d
RS
7158 rtx mask_rtx;
7159 rtx inner;
7160
7161 if (first_code == LSHIFT)
7162 first_code = ASHIFT;
7163
7164 /* We have one common special case. We can't do any merging if
7165 the inner code is an ASHIFTRT of a smaller mode. However, if
7166 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7167 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7168 we can convert it to
7169 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
7170 This simplifies certain SIGN_EXTEND operations. */
7171 if (code == ASHIFT && first_code == ASHIFTRT
7172 && (GET_MODE_BITSIZE (result_mode)
7173 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7174 {
7175 /* C3 has the low-order C1 bits zero. */
7176
5f4f0e22
CH
7177 mask = (GET_MODE_MASK (mode)
7178 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 7179
5f4f0e22 7180 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 7181 XEXP (varop, 0), mask);
5f4f0e22 7182 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
7183 varop, count);
7184 count = first_count;
7185 code = ASHIFTRT;
7186 continue;
7187 }
7188
d0ab8cd3
RK
7189 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7190 than C1 high-order bits equal to the sign bit, we can convert
7191 this to either an ASHIFT or a ASHIFTRT depending on the
7192 two counts.
230d793d
RS
7193
7194 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7195
7196 if (code == ASHIFTRT && first_code == ASHIFT
7197 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
7198 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7199 > first_count))
230d793d 7200 {
d0ab8cd3
RK
7201 count -= first_count;
7202 if (count < 0)
7203 count = - count, code = ASHIFT;
7204 varop = XEXP (varop, 0);
7205 continue;
230d793d
RS
7206 }
7207
7208 /* There are some cases we can't do. If CODE is ASHIFTRT,
7209 we can only do this if FIRST_CODE is also ASHIFTRT.
7210
7211 We can't do the case when CODE is ROTATE and FIRST_CODE is
7212 ASHIFTRT.
7213
7214 If the mode of this shift is not the mode of the outer shift,
7215 we can't do this if either shift is ASHIFTRT or ROTATE.
7216
7217 Finally, we can't do any of these if the mode is too wide
7218 unless the codes are the same.
7219
7220 Handle the case where the shift codes are the same
7221 first. */
7222
7223 if (code == first_code)
7224 {
7225 if (GET_MODE (varop) != result_mode
7226 && (code == ASHIFTRT || code == ROTATE))
7227 break;
7228
7229 count += first_count;
7230 varop = XEXP (varop, 0);
7231 continue;
7232 }
7233
7234 if (code == ASHIFTRT
7235 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 7236 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d
RS
7237 || (GET_MODE (varop) != result_mode
7238 && (first_code == ASHIFTRT || first_code == ROTATE
7239 || code == ROTATE)))
7240 break;
7241
7242 /* To compute the mask to apply after the shift, shift the
951553af 7243 nonzero bits of the inner shift the same way the
230d793d
RS
7244 outer shift will. */
7245
951553af 7246 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
7247
7248 mask_rtx
7249 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 7250 GEN_INT (count));
230d793d
RS
7251
7252 /* Give up if we can't compute an outer operation to use. */
7253 if (mask_rtx == 0
7254 || GET_CODE (mask_rtx) != CONST_INT
7255 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7256 INTVAL (mask_rtx),
7257 result_mode, &complement_p))
7258 break;
7259
7260 /* If the shifts are in the same direction, we add the
7261 counts. Otherwise, we subtract them. */
7262 if ((code == ASHIFTRT || code == LSHIFTRT)
7263 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7264 count += first_count;
7265 else
7266 count -= first_count;
7267
7268 /* If COUNT is positive, the new shift is usually CODE,
7269 except for the two exceptions below, in which case it is
7270 FIRST_CODE. If the count is negative, FIRST_CODE should
7271 always be used */
7272 if (count > 0
7273 && ((first_code == ROTATE && code == ASHIFT)
7274 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7275 code = first_code;
7276 else if (count < 0)
7277 code = first_code, count = - count;
7278
7279 varop = XEXP (varop, 0);
7280 continue;
7281 }
7282
7283 /* If we have (A << B << C) for any shift, we can convert this to
7284 (A << C << B). This wins if A is a constant. Only try this if
7285 B is not a constant. */
7286
7287 else if (GET_CODE (varop) == code
7288 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7289 && 0 != (new
7290 = simplify_binary_operation (code, mode,
7291 XEXP (varop, 0),
5f4f0e22 7292 GEN_INT (count))))
230d793d
RS
7293 {
7294 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7295 count = 0;
7296 continue;
7297 }
7298 break;
7299
7300 case NOT:
7301 /* Make this fit the case below. */
7302 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 7303 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
7304 continue;
7305
7306 case IOR:
7307 case AND:
7308 case XOR:
7309 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7310 with C the size of VAROP - 1 and the shift is logical if
7311 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7312 we have an (le X 0) operation. If we have an arithmetic shift
7313 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7314 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7315
7316 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7317 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7318 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7319 && (code == LSHIFTRT || code == ASHIFTRT)
7320 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7321 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7322 {
7323 count = 0;
7324 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7325 const0_rtx);
7326
7327 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7328 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7329
7330 continue;
7331 }
7332
7333 /* If we have (shift (logical)), move the logical to the outside
7334 to allow it to possibly combine with another logical and the
7335 shift to combine with another shift. This also canonicalizes to
7336 what a ZERO_EXTRACT looks like. Also, some machines have
7337 (and (shift)) insns. */
7338
7339 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7340 && (new = simplify_binary_operation (code, result_mode,
7341 XEXP (varop, 1),
5f4f0e22 7342 GEN_INT (count))) != 0
230d793d
RS
7343 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7344 INTVAL (new), result_mode, &complement_p))
7345 {
7346 varop = XEXP (varop, 0);
7347 continue;
7348 }
7349
7350 /* If we can't do that, try to simplify the shift in each arm of the
7351 logical expression, make a new logical expression, and apply
7352 the inverse distributive law. */
7353 {
5f4f0e22 7354 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
230d793d 7355 XEXP (varop, 0), count);
5f4f0e22 7356 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
230d793d
RS
7357 XEXP (varop, 1), count);
7358
7359 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
7360 varop = apply_distributive_law (varop);
7361
7362 count = 0;
7363 }
7364 break;
7365
7366 case EQ:
7367 /* convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7368 says that the sign bit can be tested, FOO has mode MODE, C is
7369 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
951553af 7370 may be nonzero. */
230d793d
RS
7371 if (code == LSHIFT
7372 && XEXP (varop, 1) == const0_rtx
7373 && GET_MODE (XEXP (varop, 0)) == result_mode
7374 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 7375 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 7376 && ((STORE_FLAG_VALUE
5f4f0e22 7377 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 7378 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
7379 && merge_outer_ops (&outer_op, &outer_const, XOR,
7380 (HOST_WIDE_INT) 1, result_mode,
7381 &complement_p))
230d793d
RS
7382 {
7383 varop = XEXP (varop, 0);
7384 count = 0;
7385 continue;
7386 }
7387 break;
7388
7389 case NEG:
d0ab8cd3
RK
7390 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7391 than the number of bits in the mode is equivalent to A. */
7392 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 7393 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 7394 {
d0ab8cd3 7395 varop = XEXP (varop, 0);
230d793d
RS
7396 count = 0;
7397 continue;
7398 }
7399
7400 /* NEG commutes with ASHIFT since it is multiplication. Move the
7401 NEG outside to allow shifts to combine. */
7402 if (code == ASHIFT
5f4f0e22
CH
7403 && merge_outer_ops (&outer_op, &outer_const, NEG,
7404 (HOST_WIDE_INT) 0, result_mode,
7405 &complement_p))
230d793d
RS
7406 {
7407 varop = XEXP (varop, 0);
7408 continue;
7409 }
7410 break;
7411
7412 case PLUS:
d0ab8cd3
RK
7413 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7414 is one less than the number of bits in the mode is
7415 equivalent to (xor A 1). */
230d793d
RS
7416 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7417 && XEXP (varop, 1) == constm1_rtx
951553af 7418 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
7419 && merge_outer_ops (&outer_op, &outer_const, XOR,
7420 (HOST_WIDE_INT) 1, result_mode,
7421 &complement_p))
230d793d
RS
7422 {
7423 count = 0;
7424 varop = XEXP (varop, 0);
7425 continue;
7426 }
7427
3f508eca 7428 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 7429 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
7430 bits are known zero in FOO, we can replace the PLUS with FOO.
7431 Similarly in the other operand order. This code occurs when
7432 we are computing the size of a variable-size array. */
7433
7434 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 7435 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
7436 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
7437 && (nonzero_bits (XEXP (varop, 1), result_mode)
7438 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
7439 {
7440 varop = XEXP (varop, 0);
7441 continue;
7442 }
7443 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 7444 && count < HOST_BITS_PER_WIDE_INT
ac49a949 7445 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 7446 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 7447 >> count)
951553af
RK
7448 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7449 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
7450 result_mode)))
7451 {
7452 varop = XEXP (varop, 1);
7453 continue;
7454 }
7455
230d793d
RS
7456 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7457 if (code == ASHIFT
7458 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7459 && (new = simplify_binary_operation (ASHIFT, result_mode,
7460 XEXP (varop, 1),
5f4f0e22 7461 GEN_INT (count))) != 0
230d793d
RS
7462 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7463 INTVAL (new), result_mode, &complement_p))
7464 {
7465 varop = XEXP (varop, 0);
7466 continue;
7467 }
7468 break;
7469
7470 case MINUS:
7471 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
7472 with C the size of VAROP - 1 and the shift is logical if
7473 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7474 we have a (gt X 0) operation. If the shift is arithmetic with
7475 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7476 we have a (neg (gt X 0)) operation. */
7477
7478 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7479 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7480 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7481 && (code == LSHIFTRT || code == ASHIFTRT)
7482 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7483 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7484 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7485 {
7486 count = 0;
7487 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7488 const0_rtx);
7489
7490 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7491 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7492
7493 continue;
7494 }
7495 break;
7496 }
7497
7498 break;
7499 }
7500
7501 /* We need to determine what mode to do the shift in. If the shift is
7502 a ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7503 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7504 The code we care about is that of the shift that will actually be done,
7505 not the shift that was originally requested. */
7506 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7507
7508 /* We have now finished analyzing the shift. The result should be
7509 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7510 OUTER_OP is non-NIL, it is an operation that needs to be applied
7511 to the result of the shift. OUTER_CONST is the relevant constant,
7512 but we must turn off all bits turned off in the shift.
7513
7514 If we were passed a value for X, see if we can use any pieces of
7515 it. If not, make new rtx. */
7516
7517 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7518 && GET_CODE (XEXP (x, 1)) == CONST_INT
7519 && INTVAL (XEXP (x, 1)) == count)
7520 const_rtx = XEXP (x, 1);
7521 else
5f4f0e22 7522 const_rtx = GEN_INT (count);
230d793d
RS
7523
7524 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7525 && GET_MODE (XEXP (x, 0)) == shift_mode
7526 && SUBREG_REG (XEXP (x, 0)) == varop)
7527 varop = XEXP (x, 0);
7528 else if (GET_MODE (varop) != shift_mode)
7529 varop = gen_lowpart_for_combine (shift_mode, varop);
7530
7531 /* If we can't make the SUBREG, try to return what we were given. */
7532 if (GET_CODE (varop) == CLOBBER)
7533 return x ? x : varop;
7534
7535 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
7536 if (new != 0)
7537 x = new;
7538 else
7539 {
7540 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
7541 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
7542
7543 SUBST (XEXP (x, 0), varop);
7544 SUBST (XEXP (x, 1), const_rtx);
7545 }
7546
7547 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
7548 turn off all the bits that the shift would have turned off. */
7549 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 7550 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d
RS
7551 GET_MODE_MASK (result_mode) >> orig_count);
7552
7553 /* Do the remainder of the processing in RESULT_MODE. */
7554 x = gen_lowpart_for_combine (result_mode, x);
7555
7556 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
7557 operation. */
7558 if (complement_p)
7559 x = gen_unary (NOT, result_mode, x);
7560
7561 if (outer_op != NIL)
7562 {
5f4f0e22 7563 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
7564 outer_const &= GET_MODE_MASK (result_mode);
7565
7566 if (outer_op == AND)
5f4f0e22 7567 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
7568 else if (outer_op == SET)
7569 /* This means that we have determined that the result is
7570 equivalent to a constant. This should be rare. */
5f4f0e22 7571 x = GEN_INT (outer_const);
230d793d
RS
7572 else if (GET_RTX_CLASS (outer_op) == '1')
7573 x = gen_unary (outer_op, result_mode, x);
7574 else
5f4f0e22 7575 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
7576 }
7577
7578 return x;
7579}
7580\f
7581/* Like recog, but we receive the address of a pointer to a new pattern.
7582 We try to match the rtx that the pointer points to.
7583 If that fails, we may try to modify or replace the pattern,
7584 storing the replacement into the same pointer object.
7585
7586 Modifications include deletion or addition of CLOBBERs.
7587
7588 PNOTES is a pointer to a location where any REG_UNUSED notes added for
7589 the CLOBBERs are placed.
7590
7591 The value is the final insn code from the pattern ultimately matched,
7592 or -1. */
7593
7594static int
7595recog_for_combine (pnewpat, insn, pnotes)
7596 rtx *pnewpat;
7597 rtx insn;
7598 rtx *pnotes;
7599{
7600 register rtx pat = *pnewpat;
7601 int insn_code_number;
7602 int num_clobbers_to_add = 0;
7603 int i;
7604 rtx notes = 0;
7605
7606 /* Is the result of combination a valid instruction? */
7607 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7608
7609 /* If it isn't, there is the possibility that we previously had an insn
7610 that clobbered some register as a side effect, but the combined
7611 insn doesn't need to do that. So try once more without the clobbers
7612 unless this represents an ASM insn. */
7613
7614 if (insn_code_number < 0 && ! check_asm_operands (pat)
7615 && GET_CODE (pat) == PARALLEL)
7616 {
7617 int pos;
7618
7619 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
7620 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
7621 {
7622 if (i != pos)
7623 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
7624 pos++;
7625 }
7626
7627 SUBST_INT (XVECLEN (pat, 0), pos);
7628
7629 if (pos == 1)
7630 pat = XVECEXP (pat, 0, 0);
7631
7632 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7633 }
7634
7635 /* If we had any clobbers to add, make a new pattern than contains
7636 them. Then check to make sure that all of them are dead. */
7637 if (num_clobbers_to_add)
7638 {
7639 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
7640 gen_rtvec (GET_CODE (pat) == PARALLEL
7641 ? XVECLEN (pat, 0) + num_clobbers_to_add
7642 : num_clobbers_to_add + 1));
7643
7644 if (GET_CODE (pat) == PARALLEL)
7645 for (i = 0; i < XVECLEN (pat, 0); i++)
7646 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
7647 else
7648 XVECEXP (newpat, 0, 0) = pat;
7649
7650 add_clobbers (newpat, insn_code_number);
7651
7652 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
7653 i < XVECLEN (newpat, 0); i++)
7654 {
7655 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
7656 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
7657 return -1;
7658 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
7659 XEXP (XVECEXP (newpat, 0, i), 0), notes);
7660 }
7661 pat = newpat;
7662 }
7663
7664 *pnewpat = pat;
7665 *pnotes = notes;
7666
7667 return insn_code_number;
7668}
7669\f
7670/* Like gen_lowpart but for use by combine. In combine it is not possible
7671 to create any new pseudoregs. However, it is safe to create
7672 invalid memory addresses, because combine will try to recognize
7673 them and all they will do is make the combine attempt fail.
7674
7675 If for some reason this cannot do its job, an rtx
7676 (clobber (const_int 0)) is returned.
7677 An insn containing that will not be recognized. */
7678
7679#undef gen_lowpart
7680
7681static rtx
7682gen_lowpart_for_combine (mode, x)
7683 enum machine_mode mode;
7684 register rtx x;
7685{
7686 rtx result;
7687
7688 if (GET_MODE (x) == mode)
7689 return x;
7690
eae957a8
RK
7691 /* We can only support MODE being wider than a word if X is a
7692 constant integer or has a mode the same size. */
7693
7694 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
7695 && ! ((GET_MODE (x) == VOIDmode
7696 && (GET_CODE (x) == CONST_INT
7697 || GET_CODE (x) == CONST_DOUBLE))
7698 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
230d793d
RS
7699 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7700
7701 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
7702 won't know what to do. So we will strip off the SUBREG here and
7703 process normally. */
7704 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
7705 {
7706 x = SUBREG_REG (x);
7707 if (GET_MODE (x) == mode)
7708 return x;
7709 }
7710
7711 result = gen_lowpart_common (mode, x);
7712 if (result)
7713 return result;
7714
7715 if (GET_CODE (x) == MEM)
7716 {
7717 register int offset = 0;
7718 rtx new;
7719
7720 /* Refuse to work on a volatile memory ref or one with a mode-dependent
7721 address. */
7722 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
7723 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7724
7725 /* If we want to refer to something bigger than the original memref,
7726 generate a perverse subreg instead. That will force a reload
7727 of the original memref X. */
7728 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
7729 return gen_rtx (SUBREG, mode, x, 0);
7730
7731#if WORDS_BIG_ENDIAN
7732 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
7733 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
7734#endif
7735#if BYTES_BIG_ENDIAN
7736 /* Adjust the address so that the address-after-the-data
7737 is unchanged. */
7738 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
7739 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
7740#endif
7741 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
7742 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
7743 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
7744 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
7745 return new;
7746 }
7747
7748 /* If X is a comparison operator, rewrite it in a new mode. This
7749 probably won't match, but may allow further simplifications. */
7750 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
7751 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
7752
7753 /* If we couldn't simplify X any other way, just enclose it in a
7754 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 7755 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 7756 else
dfbe1b2f
RK
7757 {
7758 int word = 0;
7759
7760 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
7761 word = ((GET_MODE_SIZE (GET_MODE (x))
7762 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
7763 / UNITS_PER_WORD);
7764 return gen_rtx (SUBREG, mode, x, word);
7765 }
230d793d
RS
7766}
7767\f
7768/* Make an rtx expression. This is a subset of gen_rtx and only supports
7769 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
7770
7771 If the identical expression was previously in the insn (in the undobuf),
7772 it will be returned. Only if it is not found will a new expression
7773 be made. */
7774
7775/*VARARGS2*/
7776static rtx
7777gen_rtx_combine (va_alist)
7778 va_dcl
7779{
7780 va_list p;
7781 enum rtx_code code;
7782 enum machine_mode mode;
7783 int n_args;
7784 rtx args[3];
7785 int i, j;
7786 char *fmt;
7787 rtx rt;
7788
7789 va_start (p);
7790 code = va_arg (p, enum rtx_code);
7791 mode = va_arg (p, enum machine_mode);
7792 n_args = GET_RTX_LENGTH (code);
7793 fmt = GET_RTX_FORMAT (code);
7794
7795 if (n_args == 0 || n_args > 3)
7796 abort ();
7797
7798 /* Get each arg and verify that it is supposed to be an expression. */
7799 for (j = 0; j < n_args; j++)
7800 {
7801 if (*fmt++ != 'e')
7802 abort ();
7803
7804 args[j] = va_arg (p, rtx);
7805 }
7806
7807 /* See if this is in undobuf. Be sure we don't use objects that came
7808 from another insn; this could produce circular rtl structures. */
7809
7810 for (i = previous_num_undos; i < undobuf.num_undo; i++)
7811 if (!undobuf.undo[i].is_int
7c046e4e
RK
7812 && GET_CODE (undobuf.undo[i].old_contents.rtx) == code
7813 && GET_MODE (undobuf.undo[i].old_contents.rtx) == mode)
230d793d
RS
7814 {
7815 for (j = 0; j < n_args; j++)
7c046e4e 7816 if (XEXP (undobuf.undo[i].old_contents.rtx, j) != args[j])
230d793d
RS
7817 break;
7818
7819 if (j == n_args)
7c046e4e 7820 return undobuf.undo[i].old_contents.rtx;
230d793d
RS
7821 }
7822
7823 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7824 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7825 rt = rtx_alloc (code);
7826 PUT_MODE (rt, mode);
7827 XEXP (rt, 0) = args[0];
7828 if (n_args > 1)
7829 {
7830 XEXP (rt, 1) = args[1];
7831 if (n_args > 2)
7832 XEXP (rt, 2) = args[2];
7833 }
7834 return rt;
7835}
7836
7837/* These routines make binary and unary operations by first seeing if they
7838 fold; if not, a new expression is allocated. */
7839
7840static rtx
7841gen_binary (code, mode, op0, op1)
7842 enum rtx_code code;
7843 enum machine_mode mode;
7844 rtx op0, op1;
7845{
7846 rtx result;
1a26b032
RK
7847 rtx tem;
7848
7849 if (GET_RTX_CLASS (code) == 'c'
7850 && (GET_CODE (op0) == CONST_INT
7851 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
7852 tem = op0, op0 = op1, op1 = tem;
230d793d
RS
7853
7854 if (GET_RTX_CLASS (code) == '<')
7855 {
7856 enum machine_mode op_mode = GET_MODE (op0);
7857 if (op_mode == VOIDmode)
7858 op_mode = GET_MODE (op1);
7859 result = simplify_relational_operation (code, op_mode, op0, op1);
7860 }
7861 else
7862 result = simplify_binary_operation (code, mode, op0, op1);
7863
7864 if (result)
7865 return result;
7866
7867 /* Put complex operands first and constants second. */
7868 if (GET_RTX_CLASS (code) == 'c'
7869 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
7870 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
7871 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
7872 || (GET_CODE (op0) == SUBREG
7873 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
7874 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
7875 return gen_rtx_combine (code, mode, op1, op0);
7876
7877 return gen_rtx_combine (code, mode, op0, op1);
7878}
7879
7880static rtx
7881gen_unary (code, mode, op0)
7882 enum rtx_code code;
7883 enum machine_mode mode;
7884 rtx op0;
7885{
7886 rtx result = simplify_unary_operation (code, mode, op0, mode);
7887
7888 if (result)
7889 return result;
7890
7891 return gen_rtx_combine (code, mode, op0);
7892}
7893\f
7894/* Simplify a comparison between *POP0 and *POP1 where CODE is the
7895 comparison code that will be tested.
7896
7897 The result is a possibly different comparison code to use. *POP0 and
7898 *POP1 may be updated.
7899
7900 It is possible that we might detect that a comparison is either always
7901 true or always false. However, we do not perform general constant
5089e22e 7902 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
7903 should have been detected earlier. Hence we ignore all such cases. */
7904
7905static enum rtx_code
7906simplify_comparison (code, pop0, pop1)
7907 enum rtx_code code;
7908 rtx *pop0;
7909 rtx *pop1;
7910{
7911 rtx op0 = *pop0;
7912 rtx op1 = *pop1;
7913 rtx tem, tem1;
7914 int i;
7915 enum machine_mode mode, tmode;
7916
7917 /* Try a few ways of applying the same transformation to both operands. */
7918 while (1)
7919 {
7920 /* If both operands are the same constant shift, see if we can ignore the
7921 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 7922 this shift are known to be zero for both inputs and if the type of
230d793d
RS
7923 comparison is compatible with the shift. */
7924 if (GET_CODE (op0) == GET_CODE (op1)
5f4f0e22 7925 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
7926 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
7927 || ((GET_CODE (op0) == LSHIFTRT
7928 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7929 && (code != GT && code != LT && code != GE && code != LE))
7930 || (GET_CODE (op0) == ASHIFTRT
7931 && (code != GTU && code != LTU
7932 && code != GEU && code != GEU)))
7933 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7934 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22 7935 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
230d793d
RS
7936 && XEXP (op0, 1) == XEXP (op1, 1))
7937 {
7938 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 7939 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
7940 int shift_count = INTVAL (XEXP (op0, 1));
7941
7942 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
7943 mask &= (mask >> shift_count) << shift_count;
7944 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7945 mask = (mask & (mask << shift_count)) >> shift_count;
7946
951553af
RK
7947 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
7948 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
230d793d
RS
7949 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
7950 else
7951 break;
7952 }
7953
7954 /* If both operands are AND's of a paradoxical SUBREG by constant, the
7955 SUBREGs are of the same mode, and, in both cases, the AND would
7956 be redundant if the comparison was done in the narrower mode,
7957 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
7958 and the operand's possibly nonzero bits are 0xffffff01; in that case
7959 if we only care about QImode, we don't need the AND). This case
7960 occurs if the output mode of an scc insn is not SImode and
230d793d
RS
7961 STORE_FLAG_VALUE == 1 (e.g., the 386). */
7962
7963 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
7964 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7965 && GET_CODE (XEXP (op1, 1)) == CONST_INT
7966 && GET_CODE (XEXP (op0, 0)) == SUBREG
7967 && GET_CODE (XEXP (op1, 0)) == SUBREG
7968 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
7969 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
7970 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
7971 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
ac49a949
RS
7972 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7973 <= HOST_BITS_PER_WIDE_INT)
951553af 7974 && (nonzero_bits (SUBREG_REG (XEXP (op0, 0)),
230d793d
RS
7975 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7976 & ~ INTVAL (XEXP (op0, 1))) == 0
951553af 7977 && (nonzero_bits (SUBREG_REG (XEXP (op1, 0)),
230d793d
RS
7978 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7979 & ~ INTVAL (XEXP (op1, 1))) == 0)
7980 {
7981 op0 = SUBREG_REG (XEXP (op0, 0));
7982 op1 = SUBREG_REG (XEXP (op1, 0));
7983
7984 /* the resulting comparison is always unsigned since we masked off
7985 the original sign bit. */
7986 code = unsigned_condition (code);
7987 }
7988 else
7989 break;
7990 }
7991
7992 /* If the first operand is a constant, swap the operands and adjust the
7993 comparison code appropriately. */
7994 if (CONSTANT_P (op0))
7995 {
7996 tem = op0, op0 = op1, op1 = tem;
7997 code = swap_condition (code);
7998 }
7999
8000 /* We now enter a loop during which we will try to simplify the comparison.
8001 For the most part, we only are concerned with comparisons with zero,
8002 but some things may really be comparisons with zero but not start
8003 out looking that way. */
8004
8005 while (GET_CODE (op1) == CONST_INT)
8006 {
8007 enum machine_mode mode = GET_MODE (op0);
8008 int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 8009 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
8010 int equality_comparison_p;
8011 int sign_bit_comparison_p;
8012 int unsigned_comparison_p;
5f4f0e22 8013 HOST_WIDE_INT const_op;
230d793d
RS
8014
8015 /* We only want to handle integral modes. This catches VOIDmode,
8016 CCmode, and the floating-point modes. An exception is that we
8017 can handle VOIDmode if OP0 is a COMPARE or a comparison
8018 operation. */
8019
8020 if (GET_MODE_CLASS (mode) != MODE_INT
8021 && ! (mode == VOIDmode
8022 && (GET_CODE (op0) == COMPARE
8023 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
8024 break;
8025
8026 /* Get the constant we are comparing against and turn off all bits
8027 not on in our mode. */
8028 const_op = INTVAL (op1);
5f4f0e22 8029 if (mode_width <= HOST_BITS_PER_WIDE_INT)
4803a34a 8030 const_op &= mask;
230d793d
RS
8031
8032 /* If we are comparing against a constant power of two and the value
951553af 8033 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
8034 `and'ed with that bit), we can replace this with a comparison
8035 with zero. */
8036 if (const_op
8037 && (code == EQ || code == NE || code == GE || code == GEU
8038 || code == LT || code == LTU)
5f4f0e22 8039 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 8040 && exact_log2 (const_op) >= 0
951553af 8041 && nonzero_bits (op0, mode) == const_op)
230d793d
RS
8042 {
8043 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
8044 op1 = const0_rtx, const_op = 0;
8045 }
8046
d0ab8cd3
RK
8047 /* Similarly, if we are comparing a value known to be either -1 or
8048 0 with -1, change it to the opposite comparison against zero. */
8049
8050 if (const_op == -1
8051 && (code == EQ || code == NE || code == GT || code == LE
8052 || code == GEU || code == LTU)
8053 && num_sign_bit_copies (op0, mode) == mode_width)
8054 {
8055 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
8056 op1 = const0_rtx, const_op = 0;
8057 }
8058
230d793d 8059 /* Do some canonicalizations based on the comparison code. We prefer
4803a34a
RK
8060 comparisons against zero and then prefer equality comparisons.
8061 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
8062
8063 switch (code)
8064 {
8065 case LT:
4803a34a
RK
8066 /* < C is equivalent to <= (C - 1) */
8067 if (const_op > 0)
230d793d 8068 {
4803a34a 8069 const_op -= 1;
5f4f0e22 8070 op1 = GEN_INT (const_op);
230d793d
RS
8071 code = LE;
8072 /* ... fall through to LE case below. */
8073 }
8074 else
8075 break;
8076
8077 case LE:
4803a34a
RK
8078 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
8079 if (const_op < 0)
8080 {
8081 const_op += 1;
5f4f0e22 8082 op1 = GEN_INT (const_op);
4803a34a
RK
8083 code = LT;
8084 }
230d793d
RS
8085
8086 /* If we are doing a <= 0 comparison on a value known to have
8087 a zero sign bit, we can replace this with == 0. */
8088 else if (const_op == 0
5f4f0e22 8089 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8090 && (nonzero_bits (op0, mode)
5f4f0e22 8091 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
8092 code = EQ;
8093 break;
8094
8095 case GE:
4803a34a
RK
8096 /* >= C is equivalent to > (C - 1). */
8097 if (const_op > 0)
230d793d 8098 {
4803a34a 8099 const_op -= 1;
5f4f0e22 8100 op1 = GEN_INT (const_op);
230d793d
RS
8101 code = GT;
8102 /* ... fall through to GT below. */
8103 }
8104 else
8105 break;
8106
8107 case GT:
4803a34a
RK
8108 /* > C is equivalent to >= (C + 1); we do this for C < 0*/
8109 if (const_op < 0)
8110 {
8111 const_op += 1;
5f4f0e22 8112 op1 = GEN_INT (const_op);
4803a34a
RK
8113 code = GE;
8114 }
230d793d
RS
8115
8116 /* If we are doing a > 0 comparison on a value known to have
8117 a zero sign bit, we can replace this with != 0. */
8118 else if (const_op == 0
5f4f0e22 8119 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8120 && (nonzero_bits (op0, mode)
5f4f0e22 8121 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
8122 code = NE;
8123 break;
8124
230d793d 8125 case LTU:
4803a34a
RK
8126 /* < C is equivalent to <= (C - 1). */
8127 if (const_op > 0)
8128 {
8129 const_op -= 1;
5f4f0e22 8130 op1 = GEN_INT (const_op);
4803a34a
RK
8131 code = LEU;
8132 /* ... fall through ... */
8133 }
d0ab8cd3
RK
8134
8135 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
8136 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8137 {
8138 const_op = 0, op1 = const0_rtx;
8139 code = GE;
8140 break;
8141 }
4803a34a
RK
8142 else
8143 break;
230d793d
RS
8144
8145 case LEU:
8146 /* unsigned <= 0 is equivalent to == 0 */
8147 if (const_op == 0)
8148 code = EQ;
d0ab8cd3
RK
8149
8150 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
8151 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8152 {
8153 const_op = 0, op1 = const0_rtx;
8154 code = GE;
8155 }
230d793d
RS
8156 break;
8157
4803a34a
RK
8158 case GEU:
8159 /* >= C is equivalent to < (C - 1). */
8160 if (const_op > 1)
8161 {
8162 const_op -= 1;
5f4f0e22 8163 op1 = GEN_INT (const_op);
4803a34a
RK
8164 code = GTU;
8165 /* ... fall through ... */
8166 }
d0ab8cd3
RK
8167
8168 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
8169 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8170 {
8171 const_op = 0, op1 = const0_rtx;
8172 code = LT;
8173 }
4803a34a
RK
8174 else
8175 break;
8176
230d793d
RS
8177 case GTU:
8178 /* unsigned > 0 is equivalent to != 0 */
8179 if (const_op == 0)
8180 code = NE;
d0ab8cd3
RK
8181
8182 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
8183 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8184 {
8185 const_op = 0, op1 = const0_rtx;
8186 code = LT;
8187 }
230d793d
RS
8188 break;
8189 }
8190
8191 /* Compute some predicates to simplify code below. */
8192
8193 equality_comparison_p = (code == EQ || code == NE);
8194 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
8195 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
8196 || code == LEU);
8197
8198 /* Now try cases based on the opcode of OP0. If none of the cases
8199 does a "continue", we exit this loop immediately after the
8200 switch. */
8201
8202 switch (GET_CODE (op0))
8203 {
8204 case ZERO_EXTRACT:
8205 /* If we are extracting a single bit from a variable position in
8206 a constant that has only a single bit set and are comparing it
8207 with zero, we can convert this into an equality comparison
8208 between the position and the location of the single bit. We can't
8209 do this if bit endian and we don't have an extzv since we then
8210 can't know what mode to use for the endianness adjustment. */
8211
8212#if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
8213 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
8214 && XEXP (op0, 1) == const1_rtx
8215 && equality_comparison_p && const_op == 0
8216 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
8217 {
8218#if BITS_BIG_ENDIAN
8219 i = (GET_MODE_BITSIZE
8220 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
8221#endif
8222
8223 op0 = XEXP (op0, 2);
5f4f0e22 8224 op1 = GEN_INT (i);
230d793d
RS
8225 const_op = i;
8226
8227 /* Result is nonzero iff shift count is equal to I. */
8228 code = reverse_condition (code);
8229 continue;
8230 }
8231#endif
8232
8233 /* ... fall through ... */
8234
8235 case SIGN_EXTRACT:
8236 tem = expand_compound_operation (op0);
8237 if (tem != op0)
8238 {
8239 op0 = tem;
8240 continue;
8241 }
8242 break;
8243
8244 case NOT:
8245 /* If testing for equality, we can take the NOT of the constant. */
8246 if (equality_comparison_p
8247 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
8248 {
8249 op0 = XEXP (op0, 0);
8250 op1 = tem;
8251 continue;
8252 }
8253
8254 /* If just looking at the sign bit, reverse the sense of the
8255 comparison. */
8256 if (sign_bit_comparison_p)
8257 {
8258 op0 = XEXP (op0, 0);
8259 code = (code == GE ? LT : GE);
8260 continue;
8261 }
8262 break;
8263
8264 case NEG:
8265 /* If testing for equality, we can take the NEG of the constant. */
8266 if (equality_comparison_p
8267 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
8268 {
8269 op0 = XEXP (op0, 0);
8270 op1 = tem;
8271 continue;
8272 }
8273
8274 /* The remaining cases only apply to comparisons with zero. */
8275 if (const_op != 0)
8276 break;
8277
8278 /* When X is ABS or is known positive,
8279 (neg X) is < 0 if and only if X != 0. */
8280
8281 if (sign_bit_comparison_p
8282 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 8283 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8284 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 8285 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
8286 {
8287 op0 = XEXP (op0, 0);
8288 code = (code == LT ? NE : EQ);
8289 continue;
8290 }
8291
3bed8141
RK
8292 /* If we have NEG of something whose two high-order bits are the
8293 same, we know that "(-a) < 0" is equivalent to "a > 0". */
8294 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
8295 {
8296 op0 = XEXP (op0, 0);
8297 code = swap_condition (code);
8298 continue;
8299 }
8300 break;
8301
8302 case ROTATE:
8303 /* If we are testing equality and our count is a constant, we
8304 can perform the inverse operation on our RHS. */
8305 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8306 && (tem = simplify_binary_operation (ROTATERT, mode,
8307 op1, XEXP (op0, 1))) != 0)
8308 {
8309 op0 = XEXP (op0, 0);
8310 op1 = tem;
8311 continue;
8312 }
8313
8314 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8315 a particular bit. Convert it to an AND of a constant of that
8316 bit. This will be converted into a ZERO_EXTRACT. */
8317 if (const_op == 0 && sign_bit_comparison_p
8318 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 8319 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 8320 {
5f4f0e22
CH
8321 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8322 ((HOST_WIDE_INT) 1
8323 << (mode_width - 1
8324 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
8325 code = (code == LT ? NE : EQ);
8326 continue;
8327 }
8328
8329 /* ... fall through ... */
8330
8331 case ABS:
8332 /* ABS is ignorable inside an equality comparison with zero. */
8333 if (const_op == 0 && equality_comparison_p)
8334 {
8335 op0 = XEXP (op0, 0);
8336 continue;
8337 }
8338 break;
8339
8340
8341 case SIGN_EXTEND:
8342 /* Can simplify (compare (zero/sign_extend FOO) CONST)
8343 to (compare FOO CONST) if CONST fits in FOO's mode and we
8344 are either testing inequality or have an unsigned comparison
8345 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
8346 if (! unsigned_comparison_p
8347 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
8348 <= HOST_BITS_PER_WIDE_INT)
8349 && ((unsigned HOST_WIDE_INT) const_op
8350 < (((HOST_WIDE_INT) 1
8351 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
8352 {
8353 op0 = XEXP (op0, 0);
8354 continue;
8355 }
8356 break;
8357
8358 case SUBREG:
a687e897
RK
8359 /* Check for the case where we are comparing A - C1 with C2,
8360 both constants are smaller than 1/2 the maxium positive
8361 value in MODE, and the comparison is equality or unsigned.
8362 In that case, if A is either zero-extended to MODE or has
8363 sufficient sign bits so that the high-order bit in MODE
8364 is a copy of the sign in the inner mode, we can prove that it is
8365 safe to do the operation in the wider mode. This simplifies
8366 many range checks. */
8367
8368 if (mode_width <= HOST_BITS_PER_WIDE_INT
8369 && subreg_lowpart_p (op0)
8370 && GET_CODE (SUBREG_REG (op0)) == PLUS
8371 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
8372 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
8373 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
8374 < GET_MODE_MASK (mode) / 2)
adb7a1cb 8375 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
8376 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
8377 GET_MODE (SUBREG_REG (op0)))
a687e897
RK
8378 & ~ GET_MODE_MASK (mode))
8379 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
8380 GET_MODE (SUBREG_REG (op0)))
8381 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8382 - GET_MODE_BITSIZE (mode)))))
8383 {
8384 op0 = SUBREG_REG (op0);
8385 continue;
8386 }
8387
fe0cf571
RK
8388 /* If the inner mode is narrower and we are extracting the low part,
8389 we can treat the SUBREG as if it were a ZERO_EXTEND. */
8390 if (subreg_lowpart_p (op0)
89f1c7f2
RS
8391 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
8392 /* Fall through */ ;
8393 else
230d793d
RS
8394 break;
8395
8396 /* ... fall through ... */
8397
8398 case ZERO_EXTEND:
8399 if ((unsigned_comparison_p || equality_comparison_p)
8400 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
8401 <= HOST_BITS_PER_WIDE_INT)
8402 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
8403 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
8404 {
8405 op0 = XEXP (op0, 0);
8406 continue;
8407 }
8408 break;
8409
8410 case PLUS:
20fdd649 8411 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 8412 this for equality comparisons due to pathological cases involving
230d793d 8413 overflows. */
20fdd649
RK
8414 if (equality_comparison_p
8415 && 0 != (tem = simplify_binary_operation (MINUS, mode,
8416 op1, XEXP (op0, 1))))
230d793d
RS
8417 {
8418 op0 = XEXP (op0, 0);
8419 op1 = tem;
8420 continue;
8421 }
8422
8423 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
8424 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
8425 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
8426 {
8427 op0 = XEXP (XEXP (op0, 0), 0);
8428 code = (code == LT ? EQ : NE);
8429 continue;
8430 }
8431 break;
8432
8433 case MINUS:
20fdd649
RK
8434 /* (eq (minus A B) C) -> (eq A (plus B C)) or
8435 (eq B (minus A C)), whichever simplifies. We can only do
8436 this for equality comparisons due to pathological cases involving
8437 overflows. */
8438 if (equality_comparison_p
8439 && 0 != (tem = simplify_binary_operation (PLUS, mode,
8440 XEXP (op0, 1), op1)))
8441 {
8442 op0 = XEXP (op0, 0);
8443 op1 = tem;
8444 continue;
8445 }
8446
8447 if (equality_comparison_p
8448 && 0 != (tem = simplify_binary_operation (MINUS, mode,
8449 XEXP (op0, 0), op1)))
8450 {
8451 op0 = XEXP (op0, 1);
8452 op1 = tem;
8453 continue;
8454 }
8455
230d793d
RS
8456 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
8457 of bits in X minus 1, is one iff X > 0. */
8458 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8459 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8460 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
8461 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8462 {
8463 op0 = XEXP (op0, 1);
8464 code = (code == GE ? LE : GT);
8465 continue;
8466 }
8467 break;
8468
8469 case XOR:
8470 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
8471 if C is zero or B is a constant. */
8472 if (equality_comparison_p
8473 && 0 != (tem = simplify_binary_operation (XOR, mode,
8474 XEXP (op0, 1), op1)))
8475 {
8476 op0 = XEXP (op0, 0);
8477 op1 = tem;
8478 continue;
8479 }
8480 break;
8481
8482 case EQ: case NE:
8483 case LT: case LTU: case LE: case LEU:
8484 case GT: case GTU: case GE: case GEU:
8485 /* We can't do anything if OP0 is a condition code value, rather
8486 than an actual data value. */
8487 if (const_op != 0
8488#ifdef HAVE_cc0
8489 || XEXP (op0, 0) == cc0_rtx
8490#endif
8491 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
8492 break;
8493
8494 /* Get the two operands being compared. */
8495 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
8496 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
8497 else
8498 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
8499
8500 /* Check for the cases where we simply want the result of the
8501 earlier test or the opposite of that result. */
8502 if (code == NE
8503 || (code == EQ && reversible_comparison_p (op0))
5f4f0e22 8504 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 8505 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 8506 && (STORE_FLAG_VALUE
5f4f0e22
CH
8507 & (((HOST_WIDE_INT) 1
8508 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
230d793d
RS
8509 && (code == LT
8510 || (code == GE && reversible_comparison_p (op0)))))
8511 {
8512 code = (code == LT || code == NE
8513 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
8514 op0 = tem, op1 = tem1;
8515 continue;
8516 }
8517 break;
8518
8519 case IOR:
8520 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
8521 iff X <= 0. */
8522 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
8523 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
8524 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8525 {
8526 op0 = XEXP (op0, 1);
8527 code = (code == GE ? GT : LE);
8528 continue;
8529 }
8530 break;
8531
8532 case AND:
8533 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
8534 will be converted to a ZERO_EXTRACT later. */
8535 if (const_op == 0 && equality_comparison_p
8536 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
8537 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
8538 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
8539 {
8540 op0 = simplify_and_const_int
8541 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
8542 XEXP (op0, 1),
8543 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 8544 (HOST_WIDE_INT) 1);
230d793d
RS
8545 continue;
8546 }
8547
8548 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
8549 zero and X is a comparison and C1 and C2 describe only bits set
8550 in STORE_FLAG_VALUE, we can compare with X. */
8551 if (const_op == 0 && equality_comparison_p
5f4f0e22 8552 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
8553 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8554 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
8555 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8556 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 8557 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
8558 {
8559 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8560 << INTVAL (XEXP (XEXP (op0, 0), 1)));
8561 if ((~ STORE_FLAG_VALUE & mask) == 0
8562 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
8563 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
8564 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
8565 {
8566 op0 = XEXP (XEXP (op0, 0), 0);
8567 continue;
8568 }
8569 }
8570
8571 /* If we are doing an equality comparison of an AND of a bit equal
8572 to the sign bit, replace this with a LT or GE comparison of
8573 the underlying value. */
8574 if (equality_comparison_p
8575 && const_op == 0
8576 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 8577 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 8578 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
5f4f0e22 8579 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
8580 {
8581 op0 = XEXP (op0, 0);
8582 code = (code == EQ ? GE : LT);
8583 continue;
8584 }
8585
8586 /* If this AND operation is really a ZERO_EXTEND from a narrower
8587 mode, the constant fits within that mode, and this is either an
8588 equality or unsigned comparison, try to do this comparison in
8589 the narrower mode. */
8590 if ((equality_comparison_p || unsigned_comparison_p)
8591 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8592 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
8593 & GET_MODE_MASK (mode))
8594 + 1)) >= 0
8595 && const_op >> i == 0
8596 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
8597 {
8598 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
8599 continue;
8600 }
8601 break;
8602
8603 case ASHIFT:
8604 case LSHIFT:
8605 /* If we have (compare (xshift FOO N) (const_int C)) and
8606 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 8607 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
8608 shifted right N bits so long as the low-order N bits of C are
8609 zero. */
8610 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8611 && INTVAL (XEXP (op0, 1)) >= 0
8612 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
8613 < HOST_BITS_PER_WIDE_INT)
8614 && ((const_op
1a26b032 8615 & ((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1) == 0)
5f4f0e22 8616 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8617 && (nonzero_bits (XEXP (op0, 0), mode)
230d793d
RS
8618 & ~ (mask >> (INTVAL (XEXP (op0, 1))
8619 + ! equality_comparison_p))) == 0)
8620 {
8621 const_op >>= INTVAL (XEXP (op0, 1));
5f4f0e22 8622 op1 = GEN_INT (const_op);
230d793d
RS
8623 op0 = XEXP (op0, 0);
8624 continue;
8625 }
8626
dfbe1b2f 8627 /* If we are doing a sign bit comparison, it means we are testing
230d793d 8628 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 8629 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 8630 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 8631 {
5f4f0e22
CH
8632 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8633 ((HOST_WIDE_INT) 1
8634 << (mode_width - 1
8635 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
8636 code = (code == LT ? NE : EQ);
8637 continue;
8638 }
dfbe1b2f
RK
8639
8640 /* If this an equality comparison with zero and we are shifting
8641 the low bit to the sign bit, we can convert this to an AND of the
8642 low-order bit. */
8643 if (const_op == 0 && equality_comparison_p
8644 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8645 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8646 {
5f4f0e22
CH
8647 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8648 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
8649 continue;
8650 }
230d793d
RS
8651 break;
8652
8653 case ASHIFTRT:
d0ab8cd3
RK
8654 /* If this is an equality comparison with zero, we can do this
8655 as a logical shift, which might be much simpler. */
8656 if (equality_comparison_p && const_op == 0
8657 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
8658 {
8659 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
8660 XEXP (op0, 0),
8661 INTVAL (XEXP (op0, 1)));
8662 continue;
8663 }
8664
230d793d
RS
8665 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
8666 do the comparison in a narrower mode. */
8667 if (! unsigned_comparison_p
8668 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8669 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8670 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
8671 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 8672 MODE_INT, 1)) != BLKmode
5f4f0e22
CH
8673 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
8674 || ((unsigned HOST_WIDE_INT) - const_op
8675 <= GET_MODE_MASK (tmode))))
230d793d
RS
8676 {
8677 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
8678 continue;
8679 }
8680
8681 /* ... fall through ... */
8682 case LSHIFTRT:
8683 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 8684 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
8685 by comparing FOO with C shifted left N bits so long as no
8686 overflow occurs. */
8687 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8688 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
8689 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8690 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8691 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 8692 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
8693 && (const_op == 0
8694 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
8695 < mode_width)))
8696 {
8697 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 8698 op1 = GEN_INT (const_op);
230d793d
RS
8699 op0 = XEXP (op0, 0);
8700 continue;
8701 }
8702
8703 /* If we are using this shift to extract just the sign bit, we
8704 can replace this with an LT or GE comparison. */
8705 if (const_op == 0
8706 && (equality_comparison_p || sign_bit_comparison_p)
8707 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8708 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8709 {
8710 op0 = XEXP (op0, 0);
8711 code = (code == NE || code == GT ? LT : GE);
8712 continue;
8713 }
8714 break;
8715 }
8716
8717 break;
8718 }
8719
8720 /* Now make any compound operations involved in this comparison. Then,
8721 check for an outmost SUBREG on OP0 that isn't doing anything or is
8722 paradoxical. The latter case can only occur when it is known that the
8723 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
8724 We can never remove a SUBREG for a non-equality comparison because the
8725 sign bit is in a different place in the underlying object. */
8726
8727 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
8728 op1 = make_compound_operation (op1, SET);
8729
8730 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8731 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8732 && (code == NE || code == EQ)
8733 && ((GET_MODE_SIZE (GET_MODE (op0))
8734 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
8735 {
8736 op0 = SUBREG_REG (op0);
8737 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
8738 }
8739
8740 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8741 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8742 && (code == NE || code == EQ)
ac49a949
RS
8743 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8744 <= HOST_BITS_PER_WIDE_INT)
951553af 8745 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
230d793d
RS
8746 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
8747 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
8748 op1),
951553af 8749 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
230d793d
RS
8750 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
8751 op0 = SUBREG_REG (op0), op1 = tem;
8752
8753 /* We now do the opposite procedure: Some machines don't have compare
8754 insns in all modes. If OP0's mode is an integer mode smaller than a
8755 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
8756 mode for which we can do the compare. There are a number of cases in
8757 which we can use the wider mode. */
230d793d
RS
8758
8759 mode = GET_MODE (op0);
8760 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
8761 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
8762 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
8763 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
8764 (tmode != VOIDmode
8765 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 8766 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 8767 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 8768 {
951553af 8769 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
8770 narrower mode and this is an equality or unsigned comparison,
8771 we can use the wider mode. Similarly for sign-extended
8772 values and equality or signed comparisons. */
8773 if (((code == EQ || code == NE
8774 || code == GEU || code == GTU || code == LEU || code == LTU)
951553af
RK
8775 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
8776 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
a687e897
RK
8777 || ((code == EQ || code == NE
8778 || code == GE || code == GT || code == LE || code == LT)
8779 && (num_sign_bit_copies (op0, tmode)
58744483 8780 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 8781 && (num_sign_bit_copies (op1, tmode)
58744483 8782 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897
RK
8783 {
8784 op0 = gen_lowpart_for_combine (tmode, op0);
8785 op1 = gen_lowpart_for_combine (tmode, op1);
8786 break;
8787 }
230d793d 8788
a687e897
RK
8789 /* If this is a test for negative, we can make an explicit
8790 test of the sign bit. */
8791
8792 if (op1 == const0_rtx && (code == LT || code == GE)
8793 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 8794 {
a687e897
RK
8795 op0 = gen_binary (AND, tmode,
8796 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
8797 GEN_INT ((HOST_WIDE_INT) 1
8798 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 8799 code = (code == LT) ? NE : EQ;
a687e897 8800 break;
230d793d 8801 }
230d793d
RS
8802 }
8803
8804 *pop0 = op0;
8805 *pop1 = op1;
8806
8807 return code;
8808}
8809\f
8810/* Return 1 if we know that X, a comparison operation, is not operating
8811 on a floating-point value or is EQ or NE, meaning that we can safely
8812 reverse it. */
8813
8814static int
8815reversible_comparison_p (x)
8816 rtx x;
8817{
8818 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
8819 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
8820 return 1;
8821
8822 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
8823 {
8824 case MODE_INT:
8825 return 1;
8826
8827 case MODE_CC:
8828 x = get_last_value (XEXP (x, 0));
8829 return (x && GET_CODE (x) == COMPARE
8830 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
8831 }
8832
8833 return 0;
8834}
8835\f
8836/* Utility function for following routine. Called when X is part of a value
8837 being stored into reg_last_set_value. Sets reg_last_set_table_tick
8838 for each register mentioned. Similar to mention_regs in cse.c */
8839
8840static void
8841update_table_tick (x)
8842 rtx x;
8843{
8844 register enum rtx_code code = GET_CODE (x);
8845 register char *fmt = GET_RTX_FORMAT (code);
8846 register int i;
8847
8848 if (code == REG)
8849 {
8850 int regno = REGNO (x);
8851 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8852 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8853
8854 for (i = regno; i < endregno; i++)
8855 reg_last_set_table_tick[i] = label_tick;
8856
8857 return;
8858 }
8859
8860 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8861 /* Note that we can't have an "E" in values stored; see
8862 get_last_value_validate. */
8863 if (fmt[i] == 'e')
8864 update_table_tick (XEXP (x, i));
8865}
8866
8867/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
8868 are saying that the register is clobbered and we no longer know its
7988fd36
RK
8869 value. If INSN is zero, don't update reg_last_set; this is only permitted
8870 with VALUE also zero and is used to invalidate the register. */
230d793d
RS
8871
8872static void
8873record_value_for_reg (reg, insn, value)
8874 rtx reg;
8875 rtx insn;
8876 rtx value;
8877{
8878 int regno = REGNO (reg);
8879 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8880 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
8881 int i;
8882
8883 /* If VALUE contains REG and we have a previous value for REG, substitute
8884 the previous value. */
8885 if (value && insn && reg_overlap_mentioned_p (reg, value))
8886 {
8887 rtx tem;
8888
8889 /* Set things up so get_last_value is allowed to see anything set up to
8890 our insn. */
8891 subst_low_cuid = INSN_CUID (insn);
8892 tem = get_last_value (reg);
8893
8894 if (tem)
8895 value = replace_rtx (copy_rtx (value), reg, tem);
8896 }
8897
8898 /* For each register modified, show we don't know its value, that
8899 its value has been updated, and that we don't know the location of
8900 the death of the register. */
8901 for (i = regno; i < endregno; i ++)
8902 {
8903 if (insn)
8904 reg_last_set[i] = insn;
8905 reg_last_set_value[i] = 0;
8906 reg_last_death[i] = 0;
8907 }
8908
8909 /* Mark registers that are being referenced in this value. */
8910 if (value)
8911 update_table_tick (value);
8912
8913 /* Now update the status of each register being set.
8914 If someone is using this register in this block, set this register
8915 to invalid since we will get confused between the two lives in this
8916 basic block. This makes using this register always invalid. In cse, we
8917 scan the table to invalidate all entries using this register, but this
8918 is too much work for us. */
8919
8920 for (i = regno; i < endregno; i++)
8921 {
8922 reg_last_set_label[i] = label_tick;
8923 if (value && reg_last_set_table_tick[i] == label_tick)
8924 reg_last_set_invalid[i] = 1;
8925 else
8926 reg_last_set_invalid[i] = 0;
8927 }
8928
8929 /* The value being assigned might refer to X (like in "x++;"). In that
8930 case, we must replace it with (clobber (const_int 0)) to prevent
8931 infinite loops. */
8932 if (value && ! get_last_value_validate (&value,
8933 reg_last_set_label[regno], 0))
8934 {
8935 value = copy_rtx (value);
8936 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
8937 value = 0;
8938 }
8939
55310dad
RK
8940 /* For the main register being modified, update the value, the mode, the
8941 nonzero bits, and the number of sign bit copies. */
8942
230d793d
RS
8943 reg_last_set_value[regno] = value;
8944
55310dad
RK
8945 if (value)
8946 {
2afabb48 8947 subst_low_cuid = INSN_CUID (insn);
55310dad
RK
8948 reg_last_set_mode[regno] = GET_MODE (reg);
8949 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
8950 reg_last_set_sign_bit_copies[regno]
8951 = num_sign_bit_copies (value, GET_MODE (reg));
8952 }
230d793d
RS
8953}
8954
8955/* Used for communication between the following two routines. */
8956static rtx record_dead_insn;
8957
8958/* Called via note_stores from record_dead_and_set_regs to handle one
8959 SET or CLOBBER in an insn. */
8960
8961static void
8962record_dead_and_set_regs_1 (dest, setter)
8963 rtx dest, setter;
8964{
8965 if (GET_CODE (dest) == REG)
8966 {
8967 /* If we are setting the whole register, we know its value. Otherwise
8968 show that we don't know the value. We can handle SUBREG in
8969 some cases. */
8970 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
8971 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
8972 else if (GET_CODE (setter) == SET
8973 && GET_CODE (SET_DEST (setter)) == SUBREG
8974 && SUBREG_REG (SET_DEST (setter)) == dest
8975 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
8976 record_value_for_reg (dest, record_dead_insn,
8977 gen_lowpart_for_combine (GET_MODE (dest),
8978 SET_SRC (setter)));
230d793d 8979 else
5f4f0e22 8980 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
8981 }
8982 else if (GET_CODE (dest) == MEM
8983 /* Ignore pushes, they clobber nothing. */
8984 && ! push_operand (dest, GET_MODE (dest)))
8985 mem_last_set = INSN_CUID (record_dead_insn);
8986}
8987
8988/* Update the records of when each REG was most recently set or killed
8989 for the things done by INSN. This is the last thing done in processing
8990 INSN in the combiner loop.
8991
8992 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
8993 similar information mem_last_set (which insn most recently modified memory)
8994 and last_call_cuid (which insn was the most recent subroutine call). */
8995
8996static void
8997record_dead_and_set_regs (insn)
8998 rtx insn;
8999{
9000 register rtx link;
55310dad
RK
9001 int i;
9002
230d793d
RS
9003 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
9004 {
dbc131f3
RK
9005 if (REG_NOTE_KIND (link) == REG_DEAD
9006 && GET_CODE (XEXP (link, 0)) == REG)
9007 {
9008 int regno = REGNO (XEXP (link, 0));
9009 int endregno
9010 = regno + (regno < FIRST_PSEUDO_REGISTER
9011 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
9012 : 1);
dbc131f3
RK
9013
9014 for (i = regno; i < endregno; i++)
9015 reg_last_death[i] = insn;
9016 }
230d793d 9017 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 9018 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
9019 }
9020
9021 if (GET_CODE (insn) == CALL_INSN)
55310dad
RK
9022 {
9023 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9024 if (call_used_regs[i])
9025 {
9026 reg_last_set_value[i] = 0;
9027 reg_last_death[i] = 0;
9028 }
9029
9030 last_call_cuid = mem_last_set = INSN_CUID (insn);
9031 }
230d793d
RS
9032
9033 record_dead_insn = insn;
9034 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
9035}
9036\f
9037/* Utility routine for the following function. Verify that all the registers
9038 mentioned in *LOC are valid when *LOC was part of a value set when
9039 label_tick == TICK. Return 0 if some are not.
9040
9041 If REPLACE is non-zero, replace the invalid reference with
9042 (clobber (const_int 0)) and return 1. This replacement is useful because
9043 we often can get useful information about the form of a value (e.g., if
9044 it was produced by a shift that always produces -1 or 0) even though
9045 we don't know exactly what registers it was produced from. */
9046
9047static int
9048get_last_value_validate (loc, tick, replace)
9049 rtx *loc;
9050 int tick;
9051 int replace;
9052{
9053 rtx x = *loc;
9054 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9055 int len = GET_RTX_LENGTH (GET_CODE (x));
9056 int i;
9057
9058 if (GET_CODE (x) == REG)
9059 {
9060 int regno = REGNO (x);
9061 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9062 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9063 int j;
9064
9065 for (j = regno; j < endregno; j++)
9066 if (reg_last_set_invalid[j]
9067 /* If this is a pseudo-register that was only set once, it is
9068 always valid. */
9069 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
9070 && reg_last_set_label[j] > tick))
9071 {
9072 if (replace)
9073 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9074 return replace;
9075 }
9076
9077 return 1;
9078 }
9079
9080 for (i = 0; i < len; i++)
9081 if ((fmt[i] == 'e'
9082 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
9083 /* Don't bother with these. They shouldn't occur anyway. */
9084 || fmt[i] == 'E')
9085 return 0;
9086
9087 /* If we haven't found a reason for it to be invalid, it is valid. */
9088 return 1;
9089}
9090
9091/* Get the last value assigned to X, if known. Some registers
9092 in the value may be replaced with (clobber (const_int 0)) if their value
9093 is known longer known reliably. */
9094
9095static rtx
9096get_last_value (x)
9097 rtx x;
9098{
9099 int regno;
9100 rtx value;
9101
9102 /* If this is a non-paradoxical SUBREG, get the value of its operand and
9103 then convert it to the desired mode. If this is a paradoxical SUBREG,
9104 we cannot predict what values the "extra" bits might have. */
9105 if (GET_CODE (x) == SUBREG
9106 && subreg_lowpart_p (x)
9107 && (GET_MODE_SIZE (GET_MODE (x))
9108 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
9109 && (value = get_last_value (SUBREG_REG (x))) != 0)
9110 return gen_lowpart_for_combine (GET_MODE (x), value);
9111
9112 if (GET_CODE (x) != REG)
9113 return 0;
9114
9115 regno = REGNO (x);
9116 value = reg_last_set_value[regno];
9117
d0ab8cd3 9118 /* If we don't have a value or if it isn't for this basic block, return 0. */
230d793d
RS
9119
9120 if (value == 0
9121 || (reg_n_sets[regno] != 1
55310dad 9122 && reg_last_set_label[regno] != label_tick))
230d793d
RS
9123 return 0;
9124
d0ab8cd3 9125 /* If the value was set in a later insn that the ones we are processing,
4090a6b3
RK
9126 we can't use it even if the register was only set once, but make a quick
9127 check to see if the previous insn set it to something. This is commonly
9128 the case when the same pseudo is used by repeated insns. */
d0ab8cd3 9129
4090a6b3 9130 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
d0ab8cd3
RK
9131 {
9132 rtx insn, set;
9133
2fc9c644 9134 for (insn = prev_nonnote_insn (subst_insn);
d0ab8cd3 9135 insn && INSN_CUID (insn) >= subst_low_cuid;
2fc9c644 9136 insn = prev_nonnote_insn (insn))
d0ab8cd3
RK
9137 ;
9138
9139 if (insn
9140 && (set = single_set (insn)) != 0
9141 && rtx_equal_p (SET_DEST (set), x))
9142 {
9143 value = SET_SRC (set);
9144
9145 /* Make sure that VALUE doesn't reference X. Replace any
9146 expliit references with a CLOBBER. If there are any remaining
9147 references (rare), don't use the value. */
9148
9149 if (reg_mentioned_p (x, value))
9150 value = replace_rtx (copy_rtx (value), x,
9151 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
9152
9153 if (reg_overlap_mentioned_p (x, value))
9154 return 0;
9155 }
9156 else
9157 return 0;
9158 }
9159
9160 /* If the value has all its registers valid, return it. */
230d793d
RS
9161 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
9162 return value;
9163
9164 /* Otherwise, make a copy and replace any invalid register with
9165 (clobber (const_int 0)). If that fails for some reason, return 0. */
9166
9167 value = copy_rtx (value);
9168 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
9169 return value;
9170
9171 return 0;
9172}
9173\f
9174/* Return nonzero if expression X refers to a REG or to memory
9175 that is set in an instruction more recent than FROM_CUID. */
9176
9177static int
9178use_crosses_set_p (x, from_cuid)
9179 register rtx x;
9180 int from_cuid;
9181{
9182 register char *fmt;
9183 register int i;
9184 register enum rtx_code code = GET_CODE (x);
9185
9186 if (code == REG)
9187 {
9188 register int regno = REGNO (x);
9189#ifdef PUSH_ROUNDING
9190 /* Don't allow uses of the stack pointer to be moved,
9191 because we don't know whether the move crosses a push insn. */
9192 if (regno == STACK_POINTER_REGNUM)
9193 return 1;
9194#endif
9195 return (reg_last_set[regno]
9196 && INSN_CUID (reg_last_set[regno]) > from_cuid);
9197 }
9198
9199 if (code == MEM && mem_last_set > from_cuid)
9200 return 1;
9201
9202 fmt = GET_RTX_FORMAT (code);
9203
9204 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9205 {
9206 if (fmt[i] == 'E')
9207 {
9208 register int j;
9209 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9210 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
9211 return 1;
9212 }
9213 else if (fmt[i] == 'e'
9214 && use_crosses_set_p (XEXP (x, i), from_cuid))
9215 return 1;
9216 }
9217 return 0;
9218}
9219\f
9220/* Define three variables used for communication between the following
9221 routines. */
9222
9223static int reg_dead_regno, reg_dead_endregno;
9224static int reg_dead_flag;
9225
9226/* Function called via note_stores from reg_dead_at_p.
9227
9228 If DEST is within [reg_dead_rengno, reg_dead_endregno), set
9229 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
9230
9231static void
9232reg_dead_at_p_1 (dest, x)
9233 rtx dest;
9234 rtx x;
9235{
9236 int regno, endregno;
9237
9238 if (GET_CODE (dest) != REG)
9239 return;
9240
9241 regno = REGNO (dest);
9242 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9243 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
9244
9245 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
9246 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
9247}
9248
9249/* Return non-zero if REG is known to be dead at INSN.
9250
9251 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
9252 referencing REG, it is dead. If we hit a SET referencing REG, it is
9253 live. Otherwise, see if it is live or dead at the start of the basic
9254 block we are in. */
9255
9256static int
9257reg_dead_at_p (reg, insn)
9258 rtx reg;
9259 rtx insn;
9260{
9261 int block, i;
9262
9263 /* Set variables for reg_dead_at_p_1. */
9264 reg_dead_regno = REGNO (reg);
9265 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
9266 ? HARD_REGNO_NREGS (reg_dead_regno,
9267 GET_MODE (reg))
9268 : 1);
9269
9270 reg_dead_flag = 0;
9271
9272 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
9273 beginning of function. */
9274 for (; insn && GET_CODE (insn) != CODE_LABEL;
9275 insn = prev_nonnote_insn (insn))
9276 {
9277 note_stores (PATTERN (insn), reg_dead_at_p_1);
9278 if (reg_dead_flag)
9279 return reg_dead_flag == 1 ? 1 : 0;
9280
9281 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
9282 return 1;
9283 }
9284
9285 /* Get the basic block number that we were in. */
9286 if (insn == 0)
9287 block = 0;
9288 else
9289 {
9290 for (block = 0; block < n_basic_blocks; block++)
9291 if (insn == basic_block_head[block])
9292 break;
9293
9294 if (block == n_basic_blocks)
9295 return 0;
9296 }
9297
9298 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
5f4f0e22
CH
9299 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
9300 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
230d793d
RS
9301 return 0;
9302
9303 return 1;
9304}
9305\f
9306/* Remove register number REGNO from the dead registers list of INSN.
9307
9308 Return the note used to record the death, if there was one. */
9309
9310rtx
9311remove_death (regno, insn)
9312 int regno;
9313 rtx insn;
9314{
9315 register rtx note = find_regno_note (insn, REG_DEAD, regno);
9316
9317 if (note)
1a26b032
RK
9318 {
9319 reg_n_deaths[regno]--;
9320 remove_note (insn, note);
9321 }
230d793d
RS
9322
9323 return note;
9324}
9325
9326/* For each register (hardware or pseudo) used within expression X, if its
9327 death is in an instruction with cuid between FROM_CUID (inclusive) and
9328 TO_INSN (exclusive), put a REG_DEAD note for that register in the
9329 list headed by PNOTES.
9330
9331 This is done when X is being merged by combination into TO_INSN. These
9332 notes will then be distributed as needed. */
9333
9334static void
9335move_deaths (x, from_cuid, to_insn, pnotes)
9336 rtx x;
9337 int from_cuid;
9338 rtx to_insn;
9339 rtx *pnotes;
9340{
9341 register char *fmt;
9342 register int len, i;
9343 register enum rtx_code code = GET_CODE (x);
9344
9345 if (code == REG)
9346 {
9347 register int regno = REGNO (x);
9348 register rtx where_dead = reg_last_death[regno];
9349
9350 if (where_dead && INSN_CUID (where_dead) >= from_cuid
9351 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
9352 {
dbc131f3 9353 rtx note = remove_death (regno, where_dead);
230d793d
RS
9354
9355 /* It is possible for the call above to return 0. This can occur
9356 when reg_last_death points to I2 or I1 that we combined with.
dbc131f3
RK
9357 In that case make a new note.
9358
9359 We must also check for the case where X is a hard register
9360 and NOTE is a death note for a range of hard registers
9361 including X. In that case, we must put REG_DEAD notes for
9362 the remaining registers in place of NOTE. */
9363
9364 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
9365 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
9366 != GET_MODE_SIZE (GET_MODE (x))))
9367 {
9368 int deadregno = REGNO (XEXP (note, 0));
9369 int deadend
9370 = (deadregno + HARD_REGNO_NREGS (deadregno,
9371 GET_MODE (XEXP (note, 0))));
9372 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9373 int i;
9374
9375 for (i = deadregno; i < deadend; i++)
9376 if (i < regno || i >= ourend)
9377 REG_NOTES (where_dead)
9378 = gen_rtx (EXPR_LIST, REG_DEAD,
9379 gen_rtx (REG, word_mode, i),
9380 REG_NOTES (where_dead));
9381 }
230d793d 9382
dbc131f3 9383 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
230d793d
RS
9384 {
9385 XEXP (note, 1) = *pnotes;
9386 *pnotes = note;
9387 }
9388 else
9389 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
1a26b032
RK
9390
9391 reg_n_deaths[regno]++;
230d793d
RS
9392 }
9393
9394 return;
9395 }
9396
9397 else if (GET_CODE (x) == SET)
9398 {
9399 rtx dest = SET_DEST (x);
9400
9401 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
9402
a7c99304
RK
9403 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
9404 that accesses one word of a multi-word item, some
9405 piece of everything register in the expression is used by
9406 this insn, so remove any old death. */
9407
9408 if (GET_CODE (dest) == ZERO_EXTRACT
9409 || GET_CODE (dest) == STRICT_LOW_PART
9410 || (GET_CODE (dest) == SUBREG
9411 && (((GET_MODE_SIZE (GET_MODE (dest))
9412 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
9413 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
9414 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
230d793d 9415 {
a7c99304
RK
9416 move_deaths (dest, from_cuid, to_insn, pnotes);
9417 return;
230d793d
RS
9418 }
9419
a7c99304
RK
9420 /* If this is some other SUBREG, we know it replaces the entire
9421 value, so use that as the destination. */
9422 if (GET_CODE (dest) == SUBREG)
9423 dest = SUBREG_REG (dest);
9424
9425 /* If this is a MEM, adjust deaths of anything used in the address.
9426 For a REG (the only other possibility), the entire value is
9427 being replaced so the old value is not used in this insn. */
230d793d
RS
9428
9429 if (GET_CODE (dest) == MEM)
9430 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
9431 return;
9432 }
9433
9434 else if (GET_CODE (x) == CLOBBER)
9435 return;
9436
9437 len = GET_RTX_LENGTH (code);
9438 fmt = GET_RTX_FORMAT (code);
9439
9440 for (i = 0; i < len; i++)
9441 {
9442 if (fmt[i] == 'E')
9443 {
9444 register int j;
9445 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9446 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
9447 }
9448 else if (fmt[i] == 'e')
9449 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
9450 }
9451}
9452\f
a7c99304
RK
9453/* Return 1 if X is the target of a bit-field assignment in BODY, the
9454 pattern of an insn. X must be a REG. */
230d793d
RS
9455
9456static int
a7c99304
RK
9457reg_bitfield_target_p (x, body)
9458 rtx x;
230d793d
RS
9459 rtx body;
9460{
9461 int i;
9462
9463 if (GET_CODE (body) == SET)
a7c99304
RK
9464 {
9465 rtx dest = SET_DEST (body);
9466 rtx target;
9467 int regno, tregno, endregno, endtregno;
9468
9469 if (GET_CODE (dest) == ZERO_EXTRACT)
9470 target = XEXP (dest, 0);
9471 else if (GET_CODE (dest) == STRICT_LOW_PART)
9472 target = SUBREG_REG (XEXP (dest, 0));
9473 else
9474 return 0;
9475
9476 if (GET_CODE (target) == SUBREG)
9477 target = SUBREG_REG (target);
9478
9479 if (GET_CODE (target) != REG)
9480 return 0;
9481
9482 tregno = REGNO (target), regno = REGNO (x);
9483 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
9484 return target == x;
9485
9486 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
9487 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9488
9489 return endregno > tregno && regno < endtregno;
9490 }
230d793d
RS
9491
9492 else if (GET_CODE (body) == PARALLEL)
9493 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 9494 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
9495 return 1;
9496
9497 return 0;
9498}
9499\f
9500/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
9501 as appropriate. I3 and I2 are the insns resulting from the combination
9502 insns including FROM (I2 may be zero).
9503
9504 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
9505 not need REG_DEAD notes because they are being substituted for. This
9506 saves searching in the most common cases.
9507
9508 Each note in the list is either ignored or placed on some insns, depending
9509 on the type of note. */
9510
9511static void
9512distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
9513 rtx notes;
9514 rtx from_insn;
9515 rtx i3, i2;
9516 rtx elim_i2, elim_i1;
9517{
9518 rtx note, next_note;
9519 rtx tem;
9520
9521 for (note = notes; note; note = next_note)
9522 {
9523 rtx place = 0, place2 = 0;
9524
9525 /* If this NOTE references a pseudo register, ensure it references
9526 the latest copy of that register. */
9527 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
9528 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
9529 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
9530
9531 next_note = XEXP (note, 1);
9532 switch (REG_NOTE_KIND (note))
9533 {
9534 case REG_UNUSED:
9535 /* If this register is set or clobbered in I3, put the note there
9536 unless there is one already. */
9537 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
9538 {
9539 if (! (GET_CODE (XEXP (note, 0)) == REG
9540 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
9541 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
9542 place = i3;
9543 }
9544 /* Otherwise, if this register is used by I3, then this register
9545 now dies here, so we must put a REG_DEAD note here unless there
9546 is one already. */
9547 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
9548 && ! (GET_CODE (XEXP (note, 0)) == REG
9549 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
9550 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
9551 {
9552 PUT_REG_NOTE_KIND (note, REG_DEAD);
9553 place = i3;
9554 }
9555 break;
9556
9557 case REG_EQUAL:
9558 case REG_EQUIV:
9559 case REG_NONNEG:
9560 /* These notes say something about results of an insn. We can
9561 only support them if they used to be on I3 in which case they
a687e897
RK
9562 remain on I3. Otherwise they are ignored.
9563
9564 If the note refers to an expression that is not a constant, we
9565 must also ignore the note since we cannot tell whether the
9566 equivalence is still true. It might be possible to do
9567 slightly better than this (we only have a problem if I2DEST
9568 or I1DEST is present in the expression), but it doesn't
9569 seem worth the trouble. */
9570
9571 if (from_insn == i3
9572 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
230d793d
RS
9573 place = i3;
9574 break;
9575
9576 case REG_INC:
9577 case REG_NO_CONFLICT:
9578 case REG_LABEL:
9579 /* These notes say something about how a register is used. They must
9580 be present on any use of the register in I2 or I3. */
9581 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
9582 place = i3;
9583
9584 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
9585 {
9586 if (place)
9587 place2 = i2;
9588 else
9589 place = i2;
9590 }
9591 break;
9592
9593 case REG_WAS_0:
9594 /* It is too much trouble to try to see if this note is still
9595 correct in all situations. It is better to simply delete it. */
9596 break;
9597
9598 case REG_RETVAL:
9599 /* If the insn previously containing this note still exists,
9600 put it back where it was. Otherwise move it to the previous
9601 insn. Adjust the corresponding REG_LIBCALL note. */
9602 if (GET_CODE (from_insn) != NOTE)
9603 place = from_insn;
9604 else
9605 {
5f4f0e22 9606 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
230d793d
RS
9607 place = prev_real_insn (from_insn);
9608 if (tem && place)
9609 XEXP (tem, 0) = place;
9610 }
9611 break;
9612
9613 case REG_LIBCALL:
9614 /* This is handled similarly to REG_RETVAL. */
9615 if (GET_CODE (from_insn) != NOTE)
9616 place = from_insn;
9617 else
9618 {
5f4f0e22 9619 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
230d793d
RS
9620 place = next_real_insn (from_insn);
9621 if (tem && place)
9622 XEXP (tem, 0) = place;
9623 }
9624 break;
9625
9626 case REG_DEAD:
9627 /* If the register is used as an input in I3, it dies there.
9628 Similarly for I2, if it is non-zero and adjacent to I3.
9629
9630 If the register is not used as an input in either I3 or I2
9631 and it is not one of the registers we were supposed to eliminate,
9632 there are two possibilities. We might have a non-adjacent I2
9633 or we might have somehow eliminated an additional register
9634 from a computation. For example, we might have had A & B where
9635 we discover that B will always be zero. In this case we will
9636 eliminate the reference to A.
9637
9638 In both cases, we must search to see if we can find a previous
9639 use of A and put the death note there. */
9640
9641 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
9642 place = i3;
9643 else if (i2 != 0 && next_nonnote_insn (i2) == i3
9644 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9645 place = i2;
9646
9647 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
9648 break;
9649
510dd77e
RK
9650 /* If the register is used in both I2 and I3 and it dies in I3,
9651 we might have added another reference to it. If reg_n_refs
9652 was 2, bump it to 3. This has to be correct since the
9653 register must have been set somewhere. The reason this is
9654 done is because local-alloc.c treats 2 references as a
9655 special case. */
9656
9657 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
9658 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
9659 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9660 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
9661
230d793d
RS
9662 if (place == 0)
9663 for (tem = prev_nonnote_insn (i3);
9664 tem && (GET_CODE (tem) == INSN
9665 || GET_CODE (tem) == CALL_INSN);
9666 tem = prev_nonnote_insn (tem))
9667 {
9668 /* If the register is being set at TEM, see if that is all
9669 TEM is doing. If so, delete TEM. Otherwise, make this
9670 into a REG_UNUSED note instead. */
9671 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
9672 {
9673 rtx set = single_set (tem);
9674
5089e22e
RS
9675 /* Verify that it was the set, and not a clobber that
9676 modified the register. */
9677
9678 if (set != 0 && ! side_effects_p (SET_SRC (set))
9679 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
230d793d
RS
9680 {
9681 /* Move the notes and links of TEM elsewhere.
9682 This might delete other dead insns recursively.
9683 First set the pattern to something that won't use
9684 any register. */
9685
9686 PATTERN (tem) = pc_rtx;
9687
5f4f0e22
CH
9688 distribute_notes (REG_NOTES (tem), tem, tem,
9689 NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
9690 distribute_links (LOG_LINKS (tem));
9691
9692 PUT_CODE (tem, NOTE);
9693 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
9694 NOTE_SOURCE_FILE (tem) = 0;
9695 }
9696 else
9697 {
9698 PUT_REG_NOTE_KIND (note, REG_UNUSED);
9699
9700 /* If there isn't already a REG_UNUSED note, put one
9701 here. */
9702 if (! find_regno_note (tem, REG_UNUSED,
9703 REGNO (XEXP (note, 0))))
9704 place = tem;
9705 break;
9706 }
9707 }
9708 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
9709 {
9710 place = tem;
9711 break;
9712 }
9713 }
9714
9715 /* If the register is set or already dead at PLACE, we needn't do
9716 anything with this note if it is still a REG_DEAD note.
9717
9718 Note that we cannot use just `dead_or_set_p' here since we can
9719 convert an assignment to a register into a bit-field assignment.
9720 Therefore, we must also omit the note if the register is the
9721 target of a bitfield assignment. */
9722
9723 if (place && REG_NOTE_KIND (note) == REG_DEAD)
9724 {
9725 int regno = REGNO (XEXP (note, 0));
9726
9727 if (dead_or_set_p (place, XEXP (note, 0))
9728 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
9729 {
9730 /* Unless the register previously died in PLACE, clear
9731 reg_last_death. [I no longer understand why this is
9732 being done.] */
9733 if (reg_last_death[regno] != place)
9734 reg_last_death[regno] = 0;
9735 place = 0;
9736 }
9737 else
9738 reg_last_death[regno] = place;
9739
9740 /* If this is a death note for a hard reg that is occupying
9741 multiple registers, ensure that we are still using all
9742 parts of the object. If we find a piece of the object
9743 that is unused, we must add a USE for that piece before
9744 PLACE and put the appropriate REG_DEAD note on it.
9745
9746 An alternative would be to put a REG_UNUSED for the pieces
9747 on the insn that set the register, but that can't be done if
9748 it is not in the same block. It is simpler, though less
9749 efficient, to add the USE insns. */
9750
9751 if (place && regno < FIRST_PSEUDO_REGISTER
9752 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
9753 {
9754 int endregno
9755 = regno + HARD_REGNO_NREGS (regno,
9756 GET_MODE (XEXP (note, 0)));
9757 int all_used = 1;
9758 int i;
9759
9760 for (i = regno; i < endregno; i++)
9761 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
9762 {
9763 rtx piece = gen_rtx (REG, word_mode, i);
28f6d3af
RK
9764 rtx p;
9765
9766 /* See if we already placed a USE note for this
9767 register in front of PLACE. */
9768 for (p = place;
9769 GET_CODE (PREV_INSN (p)) == INSN
9770 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
9771 p = PREV_INSN (p))
9772 if (rtx_equal_p (piece,
9773 XEXP (PATTERN (PREV_INSN (p)), 0)))
9774 {
9775 p = 0;
9776 break;
9777 }
9778
9779 if (p)
9780 {
9781 rtx use_insn
9782 = emit_insn_before (gen_rtx (USE, VOIDmode,
9783 piece),
9784 p);
9785 REG_NOTES (use_insn)
9786 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
9787 REG_NOTES (use_insn));
9788 }
230d793d 9789
5089e22e 9790 all_used = 0;
230d793d
RS
9791 }
9792
a394b17b
JW
9793 /* Check for the case where the register dying partially
9794 overlaps the register set by this insn. */
9795 if (all_used)
9796 for (i = regno; i < endregno; i++)
9797 if (dead_or_set_regno_p (place, i))
9798 {
9799 all_used = 0;
9800 break;
9801 }
9802
230d793d
RS
9803 if (! all_used)
9804 {
9805 /* Put only REG_DEAD notes for pieces that are
9806 still used and that are not already dead or set. */
9807
9808 for (i = regno; i < endregno; i++)
9809 {
9810 rtx piece = gen_rtx (REG, word_mode, i);
9811
9812 if (reg_referenced_p (piece, PATTERN (place))
9813 && ! dead_or_set_p (place, piece)
9814 && ! reg_bitfield_target_p (piece,
9815 PATTERN (place)))
9816 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
9817 piece,
9818 REG_NOTES (place));
9819 }
9820
9821 place = 0;
9822 }
9823 }
9824 }
9825 break;
9826
9827 default:
9828 /* Any other notes should not be present at this point in the
9829 compilation. */
9830 abort ();
9831 }
9832
9833 if (place)
9834 {
9835 XEXP (note, 1) = REG_NOTES (place);
9836 REG_NOTES (place) = note;
9837 }
1a26b032
RK
9838 else if ((REG_NOTE_KIND (note) == REG_DEAD
9839 || REG_NOTE_KIND (note) == REG_UNUSED)
9840 && GET_CODE (XEXP (note, 0)) == REG)
9841 reg_n_deaths[REGNO (XEXP (note, 0))]--;
230d793d
RS
9842
9843 if (place2)
1a26b032
RK
9844 {
9845 if ((REG_NOTE_KIND (note) == REG_DEAD
9846 || REG_NOTE_KIND (note) == REG_UNUSED)
9847 && GET_CODE (XEXP (note, 0)) == REG)
9848 reg_n_deaths[REGNO (XEXP (note, 0))]++;
9849
9850 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
9851 XEXP (note, 0), REG_NOTES (place2));
9852 }
230d793d
RS
9853 }
9854}
9855\f
9856/* Similarly to above, distribute the LOG_LINKS that used to be present on
5089e22e
RS
9857 I3, I2, and I1 to new locations. This is also called in one case to
9858 add a link pointing at I3 when I3's destination is changed. */
230d793d
RS
9859
9860static void
9861distribute_links (links)
9862 rtx links;
9863{
9864 rtx link, next_link;
9865
9866 for (link = links; link; link = next_link)
9867 {
9868 rtx place = 0;
9869 rtx insn;
9870 rtx set, reg;
9871
9872 next_link = XEXP (link, 1);
9873
9874 /* If the insn that this link points to is a NOTE or isn't a single
9875 set, ignore it. In the latter case, it isn't clear what we
9876 can do other than ignore the link, since we can't tell which
9877 register it was for. Such links wouldn't be used by combine
9878 anyway.
9879
9880 It is not possible for the destination of the target of the link to
9881 have been changed by combine. The only potential of this is if we
9882 replace I3, I2, and I1 by I3 and I2. But in that case the
9883 destination of I2 also remains unchanged. */
9884
9885 if (GET_CODE (XEXP (link, 0)) == NOTE
9886 || (set = single_set (XEXP (link, 0))) == 0)
9887 continue;
9888
9889 reg = SET_DEST (set);
9890 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
9891 || GET_CODE (reg) == SIGN_EXTRACT
9892 || GET_CODE (reg) == STRICT_LOW_PART)
9893 reg = XEXP (reg, 0);
9894
9895 /* A LOG_LINK is defined as being placed on the first insn that uses
9896 a register and points to the insn that sets the register. Start
9897 searching at the next insn after the target of the link and stop
9898 when we reach a set of the register or the end of the basic block.
9899
9900 Note that this correctly handles the link that used to point from
5089e22e 9901 I3 to I2. Also note that not much searching is typically done here
230d793d
RS
9902 since most links don't point very far away. */
9903
9904 for (insn = NEXT_INSN (XEXP (link, 0));
9905 (insn && GET_CODE (insn) != CODE_LABEL
9906 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
9907 insn = NEXT_INSN (insn))
9908 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
9909 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
9910 {
9911 if (reg_referenced_p (reg, PATTERN (insn)))
9912 place = insn;
9913 break;
9914 }
9915
9916 /* If we found a place to put the link, place it there unless there
9917 is already a link to the same insn as LINK at that point. */
9918
9919 if (place)
9920 {
9921 rtx link2;
9922
9923 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
9924 if (XEXP (link2, 0) == XEXP (link, 0))
9925 break;
9926
9927 if (link2 == 0)
9928 {
9929 XEXP (link, 1) = LOG_LINKS (place);
9930 LOG_LINKS (place) = link;
9931 }
9932 }
9933 }
9934}
9935\f
9936void
9937dump_combine_stats (file)
9938 FILE *file;
9939{
9940 fprintf
9941 (file,
9942 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
9943 combine_attempts, combine_merges, combine_extras, combine_successes);
9944}
9945
9946void
9947dump_combine_total_stats (file)
9948 FILE *file;
9949{
9950 fprintf
9951 (file,
9952 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
9953 total_attempts, total_merges, total_extras, total_successes);
9954}
This page took 1.280439 seconds and 5 git commands to generate.