]> gcc.gnu.org Git - gcc.git/blame - gcc/combine.c
(num_sign_bit_copies): Ifndef WORD_REGISTER_OPERATIONS, ensure we
[gcc.git] / gcc / combine.c
CommitLineData
230d793d 1/* Optimize by combining instructions for GNU compiler.
0c314d1a 2 Copyright (C) 1987, 1988, 1992, 1993, 1994 Free Software Foundation, Inc.
230d793d
RS
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21/* This module is essentially the "combiner" phase of the U. of Arizona
22 Portable Optimizer, but redone to work on our list-structured
23 representation for RTL instead of their string representation.
24
25 The LOG_LINKS of each insn identify the most recent assignment
26 to each REG used in the insn. It is a list of previous insns,
27 each of which contains a SET for a REG that is used in this insn
28 and not used or set in between. LOG_LINKs never cross basic blocks.
29 They were set up by the preceding pass (lifetime analysis).
30
31 We try to combine each pair of insns joined by a logical link.
32 We also try to combine triples of insns A, B and C when
33 C has a link back to B and B has a link back to A.
34
35 LOG_LINKS does not have links for use of the CC0. They don't
36 need to, because the insn that sets the CC0 is always immediately
37 before the insn that tests it. So we always regard a branch
38 insn as having a logical link to the preceding insn. The same is true
39 for an insn explicitly using CC0.
40
41 We check (with use_crosses_set_p) to avoid combining in such a way
42 as to move a computation to a place where its value would be different.
43
44 Combination is done by mathematically substituting the previous
45 insn(s) values for the regs they set into the expressions in
46 the later insns that refer to these regs. If the result is a valid insn
47 for our target machine, according to the machine description,
48 we install it, delete the earlier insns, and update the data flow
49 information (LOG_LINKS and REG_NOTES) for what we did.
50
51 There are a few exceptions where the dataflow information created by
52 flow.c aren't completely updated:
53
54 - reg_live_length is not updated
55 - reg_n_refs is not adjusted in the rare case when a register is
56 no longer required in a computation
57 - there are extremely rare cases (see distribute_regnotes) when a
58 REG_DEAD note is lost
59 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
60 removed because there is no way to know which register it was
61 linking
62
63 To simplify substitution, we combine only when the earlier insn(s)
64 consist of only a single assignment. To simplify updating afterward,
65 we never combine when a subroutine call appears in the middle.
66
67 Since we do not represent assignments to CC0 explicitly except when that
68 is all an insn does, there is no LOG_LINKS entry in an insn that uses
69 the condition code for the insn that set the condition code.
70 Fortunately, these two insns must be consecutive.
71 Therefore, every JUMP_INSN is taken to have an implicit logical link
72 to the preceding insn. This is not quite right, since non-jumps can
73 also use the condition code; but in practice such insns would not
74 combine anyway. */
75
230d793d 76#include "config.h"
dfa3449b
RS
77#include "gvarargs.h"
78
9c3b4c8b
RS
79/* Must precede rtl.h for FFS. */
80#include <stdio.h>
81
230d793d
RS
82#include "rtl.h"
83#include "flags.h"
84#include "regs.h"
55310dad 85#include "hard-reg-set.h"
230d793d
RS
86#include "expr.h"
87#include "basic-block.h"
88#include "insn-config.h"
89#include "insn-flags.h"
90#include "insn-codes.h"
91#include "insn-attr.h"
92#include "recog.h"
93#include "real.h"
94
95/* It is not safe to use ordinary gen_lowpart in combine.
96 Use gen_lowpart_for_combine instead. See comments there. */
97#define gen_lowpart dont_use_gen_lowpart_you_dummy
98
99/* Number of attempts to combine instructions in this function. */
100
101static int combine_attempts;
102
103/* Number of attempts that got as far as substitution in this function. */
104
105static int combine_merges;
106
107/* Number of instructions combined with added SETs in this function. */
108
109static int combine_extras;
110
111/* Number of instructions combined in this function. */
112
113static int combine_successes;
114
115/* Totals over entire compilation. */
116
117static int total_attempts, total_merges, total_extras, total_successes;
118\f
119/* Vector mapping INSN_UIDs to cuids.
5089e22e 120 The cuids are like uids but increase monotonically always.
230d793d
RS
121 Combine always uses cuids so that it can compare them.
122 But actually renumbering the uids, which we used to do,
123 proves to be a bad idea because it makes it hard to compare
124 the dumps produced by earlier passes with those from later passes. */
125
126static int *uid_cuid;
127
128/* Get the cuid of an insn. */
129
130#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
131
132/* Maximum register number, which is the size of the tables below. */
133
134static int combine_max_regno;
135
136/* Record last point of death of (hard or pseudo) register n. */
137
138static rtx *reg_last_death;
139
140/* Record last point of modification of (hard or pseudo) register n. */
141
142static rtx *reg_last_set;
143
144/* Record the cuid of the last insn that invalidated memory
145 (anything that writes memory, and subroutine calls, but not pushes). */
146
147static int mem_last_set;
148
149/* Record the cuid of the last CALL_INSN
150 so we can tell whether a potential combination crosses any calls. */
151
152static int last_call_cuid;
153
154/* When `subst' is called, this is the insn that is being modified
155 (by combining in a previous insn). The PATTERN of this insn
156 is still the old pattern partially modified and it should not be
157 looked at, but this may be used to examine the successors of the insn
158 to judge whether a simplification is valid. */
159
160static rtx subst_insn;
161
137e889e
RK
162/* If nonzero, this is the insn that should be presumed to be
163 immediately in front of `subst_insn'. */
164
165static rtx subst_prev_insn;
166
230d793d
RS
167/* This is the lowest CUID that `subst' is currently dealing with.
168 get_last_value will not return a value if the register was set at or
169 after this CUID. If not for this mechanism, we could get confused if
170 I2 or I1 in try_combine were an insn that used the old value of a register
171 to obtain a new value. In that case, we might erroneously get the
172 new value of the register when we wanted the old one. */
173
174static int subst_low_cuid;
175
abe6e52f
RK
176/* This is an insn to which a LOG_LINKS entry has been added. If this
177 insn is the earlier than I2 or I3, combine should rescan starting at
178 that location. */
179
180static rtx added_links_insn;
181
230d793d
RS
182/* This is the value of undobuf.num_undo when we started processing this
183 substitution. This will prevent gen_rtx_combine from re-used a piece
184 from the previous expression. Doing so can produce circular rtl
185 structures. */
186
187static int previous_num_undos;
ca5c3ef4 188
0d4d42c3
RK
189/* Basic block number of the block in which we are performing combines. */
190static int this_basic_block;
230d793d
RS
191\f
192/* The next group of arrays allows the recording of the last value assigned
193 to (hard or pseudo) register n. We use this information to see if a
5089e22e 194 operation being processed is redundant given a prior operation performed
230d793d
RS
195 on the register. For example, an `and' with a constant is redundant if
196 all the zero bits are already known to be turned off.
197
198 We use an approach similar to that used by cse, but change it in the
199 following ways:
200
201 (1) We do not want to reinitialize at each label.
202 (2) It is useful, but not critical, to know the actual value assigned
203 to a register. Often just its form is helpful.
204
205 Therefore, we maintain the following arrays:
206
207 reg_last_set_value the last value assigned
208 reg_last_set_label records the value of label_tick when the
209 register was assigned
210 reg_last_set_table_tick records the value of label_tick when a
211 value using the register is assigned
212 reg_last_set_invalid set to non-zero when it is not valid
213 to use the value of this register in some
214 register's value
215
216 To understand the usage of these tables, it is important to understand
217 the distinction between the value in reg_last_set_value being valid
218 and the register being validly contained in some other expression in the
219 table.
220
221 Entry I in reg_last_set_value is valid if it is non-zero, and either
222 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
223
224 Register I may validly appear in any expression returned for the value
225 of another register if reg_n_sets[i] is 1. It may also appear in the
226 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
227 reg_last_set_invalid[j] is zero.
228
229 If an expression is found in the table containing a register which may
230 not validly appear in an expression, the register is replaced by
231 something that won't match, (clobber (const_int 0)).
232
233 reg_last_set_invalid[i] is set non-zero when register I is being assigned
234 to and reg_last_set_table_tick[i] == label_tick. */
235
236/* Record last value assigned to (hard or pseudo) register n. */
237
238static rtx *reg_last_set_value;
239
240/* Record the value of label_tick when the value for register n is placed in
241 reg_last_set_value[n]. */
242
568356af 243static int *reg_last_set_label;
230d793d
RS
244
245/* Record the value of label_tick when an expression involving register n
246 is placed in reg_last_set_value. */
247
568356af 248static int *reg_last_set_table_tick;
230d793d
RS
249
250/* Set non-zero if references to register n in expressions should not be
251 used. */
252
253static char *reg_last_set_invalid;
254
255/* Incremented for each label. */
256
568356af 257static int label_tick;
230d793d
RS
258
259/* Some registers that are set more than once and used in more than one
260 basic block are nevertheless always set in similar ways. For example,
261 a QImode register may be loaded from memory in two places on a machine
262 where byte loads zero extend.
263
951553af 264 We record in the following array what we know about the nonzero
230d793d
RS
265 bits of a register, specifically which bits are known to be zero.
266
267 If an entry is zero, it means that we don't know anything special. */
268
55310dad 269static unsigned HOST_WIDE_INT *reg_nonzero_bits;
230d793d 270
951553af 271/* Mode used to compute significance in reg_nonzero_bits. It is the largest
5f4f0e22 272 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
230d793d 273
951553af 274static enum machine_mode nonzero_bits_mode;
230d793d 275
d0ab8cd3
RK
276/* Nonzero if we know that a register has some leading bits that are always
277 equal to the sign bit. */
278
279static char *reg_sign_bit_copies;
280
951553af 281/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
1a26b032
RK
282 It is zero while computing them and after combine has completed. This
283 former test prevents propagating values based on previously set values,
284 which can be incorrect if a variable is modified in a loop. */
230d793d 285
951553af 286static int nonzero_sign_valid;
55310dad
RK
287
288/* These arrays are maintained in parallel with reg_last_set_value
289 and are used to store the mode in which the register was last set,
290 the bits that were known to be zero when it was last set, and the
291 number of sign bits copies it was known to have when it was last set. */
292
293static enum machine_mode *reg_last_set_mode;
294static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
295static char *reg_last_set_sign_bit_copies;
230d793d
RS
296\f
297/* Record one modification to rtl structure
298 to be undone by storing old_contents into *where.
299 is_int is 1 if the contents are an int. */
300
301struct undo
302{
230d793d 303 int is_int;
f5393ab9
RS
304 union {rtx r; int i;} old_contents;
305 union {rtx *r; int *i;} where;
230d793d
RS
306};
307
308/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
309 num_undo says how many are currently recorded.
310
311 storage is nonzero if we must undo the allocation of new storage.
312 The value of storage is what to pass to obfree.
313
314 other_insn is nonzero if we have modified some other insn in the process
315 of working on subst_insn. It must be verified too. */
316
317#define MAX_UNDO 50
318
319struct undobuf
320{
321 int num_undo;
322 char *storage;
323 struct undo undo[MAX_UNDO];
324 rtx other_insn;
325};
326
327static struct undobuf undobuf;
328
cc876596 329/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
230d793d 330 insn. The substitution can be undone by undo_all. If INTO is already
cc876596
RK
331 set to NEWVAL, do not record this change. Because computing NEWVAL might
332 also call SUBST, we have to compute it before we put anything into
333 the undo table. */
230d793d
RS
334
335#define SUBST(INTO, NEWVAL) \
cc876596
RK
336 do { rtx _new = (NEWVAL); \
337 if (undobuf.num_undo < MAX_UNDO) \
230d793d 338 { \
230d793d 339 undobuf.undo[undobuf.num_undo].is_int = 0; \
f5393ab9
RS
340 undobuf.undo[undobuf.num_undo].where.r = &INTO; \
341 undobuf.undo[undobuf.num_undo].old_contents.r = INTO; \
cc876596 342 INTO = _new; \
f5393ab9 343 if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO) \
230d793d
RS
344 undobuf.num_undo++; \
345 } \
346 } while (0)
347
348/* Similar to SUBST, but NEWVAL is an int. INTO will normally be an XINT
349 expression.
350 Note that substitution for the value of a CONST_INT is not safe. */
351
352#define SUBST_INT(INTO, NEWVAL) \
353 do { if (undobuf.num_undo < MAX_UNDO) \
354{ \
7c046e4e
RK
355 undobuf.undo[undobuf.num_undo].is_int = 1; \
356 undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
357 undobuf.undo[undobuf.num_undo].old_contents.i = INTO; \
230d793d 358 INTO = NEWVAL; \
7c046e4e 359 if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
230d793d
RS
360 undobuf.num_undo++; \
361 } \
362 } while (0)
363
364/* Number of times the pseudo being substituted for
365 was found and replaced. */
366
367static int n_occurrences;
368
ef026f91 369static void init_reg_last_arrays PROTO(());
fe2db4fb
RK
370static void setup_incoming_promotions PROTO(());
371static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
372static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
373static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
374static rtx try_combine PROTO((rtx, rtx, rtx));
375static void undo_all PROTO((void));
376static rtx *find_split_point PROTO((rtx *, rtx));
377static rtx subst PROTO((rtx, rtx, rtx, int, int));
378static rtx expand_compound_operation PROTO((rtx));
379static rtx expand_field_assignment PROTO((rtx));
380static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
381 int, int, int));
382static rtx make_compound_operation PROTO((rtx, enum rtx_code));
383static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
6139ff20 384static rtx force_to_mode PROTO((rtx, enum machine_mode,
e3d616e3 385 unsigned HOST_WIDE_INT, rtx, int));
abe6e52f 386static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
fe2db4fb
RK
387static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
388static rtx make_field_assignment PROTO((rtx));
389static rtx apply_distributive_law PROTO((rtx));
390static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
391 unsigned HOST_WIDE_INT));
392static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
393static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
394static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
395 enum rtx_code, HOST_WIDE_INT,
396 enum machine_mode, int *));
397static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
398 rtx, int));
399static int recog_for_combine PROTO((rtx *, rtx, rtx *));
400static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
401static rtx gen_rtx_combine (); /* This is varargs. */
402static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
403 rtx, rtx));
404static rtx gen_unary PROTO((enum rtx_code, enum machine_mode, rtx));
405static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
406static int reversible_comparison_p PROTO((rtx));
407static void update_table_tick PROTO((rtx));
408static void record_value_for_reg PROTO((rtx, rtx, rtx));
409static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
410static void record_dead_and_set_regs PROTO((rtx));
411static int get_last_value_validate PROTO((rtx *, int, int));
412static rtx get_last_value PROTO((rtx));
413static int use_crosses_set_p PROTO((rtx, int));
414static void reg_dead_at_p_1 PROTO((rtx, rtx));
415static int reg_dead_at_p PROTO((rtx, rtx));
416static void move_deaths PROTO((rtx, int, rtx, rtx *));
417static int reg_bitfield_target_p PROTO((rtx, rtx));
418static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
419static void distribute_links PROTO((rtx));
230d793d
RS
420\f
421/* Main entry point for combiner. F is the first insn of the function.
422 NREGS is the first unused pseudo-reg number. */
423
424void
425combine_instructions (f, nregs)
426 rtx f;
427 int nregs;
428{
429 register rtx insn, next, prev;
430 register int i;
431 register rtx links, nextlinks;
432
433 combine_attempts = 0;
434 combine_merges = 0;
435 combine_extras = 0;
436 combine_successes = 0;
bef9925b 437 undobuf.num_undo = previous_num_undos = 0;
230d793d
RS
438
439 combine_max_regno = nregs;
440
ef026f91
RS
441 reg_nonzero_bits
442 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
443 reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));
444
445 bzero (reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
446 bzero (reg_sign_bit_copies, nregs * sizeof (char));
447
230d793d
RS
448 reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
449 reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
450 reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
568356af
RK
451 reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
452 reg_last_set_label = (int *) alloca (nregs * sizeof (int));
5f4f0e22 453 reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
55310dad
RK
454 reg_last_set_mode
455 = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
456 reg_last_set_nonzero_bits
457 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
458 reg_last_set_sign_bit_copies
459 = (char *) alloca (nregs * sizeof (char));
460
ef026f91 461 init_reg_last_arrays ();
230d793d
RS
462
463 init_recog_no_volatile ();
464
465 /* Compute maximum uid value so uid_cuid can be allocated. */
466
467 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
468 if (INSN_UID (insn) > i)
469 i = INSN_UID (insn);
470
471 uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
472
951553af 473 nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
230d793d 474
951553af 475 /* Don't use reg_nonzero_bits when computing it. This can cause problems
230d793d
RS
476 when, for example, we have j <<= 1 in a loop. */
477
951553af 478 nonzero_sign_valid = 0;
230d793d
RS
479
480 /* Compute the mapping from uids to cuids.
481 Cuids are numbers assigned to insns, like uids,
482 except that cuids increase monotonically through the code.
483
484 Scan all SETs and see if we can deduce anything about what
951553af 485 bits are known to be zero for some registers and how many copies
d79f08e0
RK
486 of the sign bit are known to exist for those registers.
487
488 Also set any known values so that we can use it while searching
489 for what bits are known to be set. */
490
491 label_tick = 1;
230d793d 492
7988fd36
RK
493 setup_incoming_promotions ();
494
230d793d
RS
495 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
496 {
497 INSN_CUID (insn) = ++i;
d79f08e0
RK
498 subst_low_cuid = i;
499 subst_insn = insn;
500
230d793d 501 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
d79f08e0
RK
502 {
503 note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
504 record_dead_and_set_regs (insn);
505 }
506
507 if (GET_CODE (insn) == CODE_LABEL)
508 label_tick++;
230d793d
RS
509 }
510
951553af 511 nonzero_sign_valid = 1;
230d793d
RS
512
513 /* Now scan all the insns in forward order. */
514
0d4d42c3 515 this_basic_block = -1;
230d793d
RS
516 label_tick = 1;
517 last_call_cuid = 0;
518 mem_last_set = 0;
ef026f91 519 init_reg_last_arrays ();
7988fd36
RK
520 setup_incoming_promotions ();
521
230d793d
RS
522 for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
523 {
524 next = 0;
525
0d4d42c3 526 /* If INSN starts a new basic block, update our basic block number. */
f085c9cd 527 if (this_basic_block + 1 < n_basic_blocks
0d4d42c3
RK
528 && basic_block_head[this_basic_block + 1] == insn)
529 this_basic_block++;
530
230d793d
RS
531 if (GET_CODE (insn) == CODE_LABEL)
532 label_tick++;
533
0d4d42c3 534 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
230d793d
RS
535 {
536 /* Try this insn with each insn it links back to. */
537
538 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
5f4f0e22 539 if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
230d793d
RS
540 goto retry;
541
542 /* Try each sequence of three linked insns ending with this one. */
543
544 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
545 for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
546 nextlinks = XEXP (nextlinks, 1))
547 if ((next = try_combine (insn, XEXP (links, 0),
548 XEXP (nextlinks, 0))) != 0)
549 goto retry;
550
551#ifdef HAVE_cc0
552 /* Try to combine a jump insn that uses CC0
553 with a preceding insn that sets CC0, and maybe with its
554 logical predecessor as well.
555 This is how we make decrement-and-branch insns.
556 We need this special code because data flow connections
557 via CC0 do not get entered in LOG_LINKS. */
558
559 if (GET_CODE (insn) == JUMP_INSN
560 && (prev = prev_nonnote_insn (insn)) != 0
561 && GET_CODE (prev) == INSN
562 && sets_cc0_p (PATTERN (prev)))
563 {
5f4f0e22 564 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
230d793d
RS
565 goto retry;
566
567 for (nextlinks = LOG_LINKS (prev); nextlinks;
568 nextlinks = XEXP (nextlinks, 1))
569 if ((next = try_combine (insn, prev,
570 XEXP (nextlinks, 0))) != 0)
571 goto retry;
572 }
573
574 /* Do the same for an insn that explicitly references CC0. */
575 if (GET_CODE (insn) == INSN
576 && (prev = prev_nonnote_insn (insn)) != 0
577 && GET_CODE (prev) == INSN
578 && sets_cc0_p (PATTERN (prev))
579 && GET_CODE (PATTERN (insn)) == SET
580 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
581 {
5f4f0e22 582 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
230d793d
RS
583 goto retry;
584
585 for (nextlinks = LOG_LINKS (prev); nextlinks;
586 nextlinks = XEXP (nextlinks, 1))
587 if ((next = try_combine (insn, prev,
588 XEXP (nextlinks, 0))) != 0)
589 goto retry;
590 }
591
592 /* Finally, see if any of the insns that this insn links to
593 explicitly references CC0. If so, try this insn, that insn,
5089e22e 594 and its predecessor if it sets CC0. */
230d793d
RS
595 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
596 if (GET_CODE (XEXP (links, 0)) == INSN
597 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
598 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
599 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
600 && GET_CODE (prev) == INSN
601 && sets_cc0_p (PATTERN (prev))
602 && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
603 goto retry;
604#endif
605
606 /* Try combining an insn with two different insns whose results it
607 uses. */
608 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
609 for (nextlinks = XEXP (links, 1); nextlinks;
610 nextlinks = XEXP (nextlinks, 1))
611 if ((next = try_combine (insn, XEXP (links, 0),
612 XEXP (nextlinks, 0))) != 0)
613 goto retry;
614
615 if (GET_CODE (insn) != NOTE)
616 record_dead_and_set_regs (insn);
617
618 retry:
619 ;
620 }
621 }
622
623 total_attempts += combine_attempts;
624 total_merges += combine_merges;
625 total_extras += combine_extras;
626 total_successes += combine_successes;
1a26b032 627
951553af 628 nonzero_sign_valid = 0;
230d793d 629}
ef026f91
RS
630
631/* Wipe the reg_last_xxx arrays in preparation for another pass. */
632
633static void
634init_reg_last_arrays ()
635{
636 int nregs = combine_max_regno;
637
638 bzero (reg_last_death, nregs * sizeof (rtx));
639 bzero (reg_last_set, nregs * sizeof (rtx));
640 bzero (reg_last_set_value, nregs * sizeof (rtx));
641 bzero (reg_last_set_table_tick, nregs * sizeof (int));
642 bzero (reg_last_set_label, nregs * sizeof (int));
643 bzero (reg_last_set_invalid, nregs * sizeof (char));
644 bzero (reg_last_set_mode, nregs * sizeof (enum machine_mode));
645 bzero (reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
646 bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
647}
230d793d 648\f
7988fd36
RK
649/* Set up any promoted values for incoming argument registers. */
650
ee791cc3 651static void
7988fd36
RK
652setup_incoming_promotions ()
653{
654#ifdef PROMOTE_FUNCTION_ARGS
655 int regno;
656 rtx reg;
657 enum machine_mode mode;
658 int unsignedp;
659 rtx first = get_insns ();
660
661 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
662 if (FUNCTION_ARG_REGNO_P (regno)
663 && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
664 record_value_for_reg (reg, first,
665 gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
500c518b
RK
666 GET_MODE (reg),
667 gen_rtx (CLOBBER, mode, const0_rtx)));
7988fd36
RK
668#endif
669}
670\f
230d793d 671/* Called via note_stores. If X is a pseudo that is used in more than
5f4f0e22 672 one basic block, is narrower that HOST_BITS_PER_WIDE_INT, and is being
951553af 673 set, record what bits are known zero. If we are clobbering X,
230d793d
RS
674 ignore this "set" because the clobbered value won't be used.
675
676 If we are setting only a portion of X and we can't figure out what
677 portion, assume all bits will be used since we don't know what will
d0ab8cd3
RK
678 be happening.
679
680 Similarly, set how many bits of X are known to be copies of the sign bit
681 at all locations in the function. This is the smallest number implied
682 by any set of X. */
230d793d
RS
683
684static void
951553af 685set_nonzero_bits_and_sign_copies (x, set)
230d793d
RS
686 rtx x;
687 rtx set;
688{
d0ab8cd3
RK
689 int num;
690
230d793d
RS
691 if (GET_CODE (x) == REG
692 && REGNO (x) >= FIRST_PSEUDO_REGISTER
693 && reg_n_sets[REGNO (x)] > 1
694 && reg_basic_block[REGNO (x)] < 0
e8095e80
RK
695 /* If this register is undefined at the start of the file, we can't
696 say what its contents were. */
697 && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
698 & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
5f4f0e22 699 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
700 {
701 if (GET_CODE (set) == CLOBBER)
e8095e80
RK
702 {
703 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
704 reg_sign_bit_copies[REGNO (x)] = 0;
705 return;
706 }
230d793d
RS
707
708 /* If this is a complex assignment, see if we can convert it into a
5089e22e 709 simple assignment. */
230d793d 710 set = expand_field_assignment (set);
d79f08e0
RK
711
712 /* If this is a simple assignment, or we have a paradoxical SUBREG,
713 set what we know about X. */
714
715 if (SET_DEST (set) == x
716 || (GET_CODE (SET_DEST (set)) == SUBREG
705c7b3b
JW
717 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
718 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
d79f08e0 719 && SUBREG_REG (SET_DEST (set)) == x))
d0ab8cd3 720 {
9afa3d54
RK
721 rtx src = SET_SRC (set);
722
723#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
724 /* If X is narrower than a word and SRC is a non-negative
725 constant that would appear negative in the mode of X,
726 sign-extend it for use in reg_nonzero_bits because some
727 machines (maybe most) will actually do the sign-extension
728 and this is the conservative approach.
729
730 ??? For 2.5, try to tighten up the MD files in this regard
731 instead of this kludge. */
732
733 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
734 && GET_CODE (src) == CONST_INT
735 && INTVAL (src) > 0
736 && 0 != (INTVAL (src)
737 & ((HOST_WIDE_INT) 1
9e69be8c 738 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
739 src = GEN_INT (INTVAL (src)
740 | ((HOST_WIDE_INT) (-1)
741 << GET_MODE_BITSIZE (GET_MODE (x))));
742#endif
743
951553af 744 reg_nonzero_bits[REGNO (x)]
9afa3d54 745 |= nonzero_bits (src, nonzero_bits_mode);
d0ab8cd3
RK
746 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
747 if (reg_sign_bit_copies[REGNO (x)] == 0
748 || reg_sign_bit_copies[REGNO (x)] > num)
749 reg_sign_bit_copies[REGNO (x)] = num;
750 }
230d793d 751 else
d0ab8cd3 752 {
951553af 753 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
d0ab8cd3
RK
754 reg_sign_bit_copies[REGNO (x)] = 0;
755 }
230d793d
RS
756 }
757}
758\f
759/* See if INSN can be combined into I3. PRED and SUCC are optionally
760 insns that were previously combined into I3 or that will be combined
761 into the merger of INSN and I3.
762
763 Return 0 if the combination is not allowed for any reason.
764
765 If the combination is allowed, *PDEST will be set to the single
766 destination of INSN and *PSRC to the single source, and this function
767 will return 1. */
768
769static int
770can_combine_p (insn, i3, pred, succ, pdest, psrc)
771 rtx insn;
772 rtx i3;
773 rtx pred, succ;
774 rtx *pdest, *psrc;
775{
776 int i;
777 rtx set = 0, src, dest;
778 rtx p, link;
779 int all_adjacent = (succ ? (next_active_insn (insn) == succ
780 && next_active_insn (succ) == i3)
781 : next_active_insn (insn) == i3);
782
783 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
784 or a PARALLEL consisting of such a SET and CLOBBERs.
785
786 If INSN has CLOBBER parallel parts, ignore them for our processing.
787 By definition, these happen during the execution of the insn. When it
788 is merged with another insn, all bets are off. If they are, in fact,
789 needed and aren't also supplied in I3, they may be added by
790 recog_for_combine. Otherwise, it won't match.
791
792 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
793 note.
794
795 Get the source and destination of INSN. If more than one, can't
796 combine. */
797
798 if (GET_CODE (PATTERN (insn)) == SET)
799 set = PATTERN (insn);
800 else if (GET_CODE (PATTERN (insn)) == PARALLEL
801 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
802 {
803 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
804 {
805 rtx elt = XVECEXP (PATTERN (insn), 0, i);
806
807 switch (GET_CODE (elt))
808 {
809 /* We can ignore CLOBBERs. */
810 case CLOBBER:
811 break;
812
813 case SET:
814 /* Ignore SETs whose result isn't used but not those that
815 have side-effects. */
816 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
817 && ! side_effects_p (elt))
818 break;
819
820 /* If we have already found a SET, this is a second one and
821 so we cannot combine with this insn. */
822 if (set)
823 return 0;
824
825 set = elt;
826 break;
827
828 default:
829 /* Anything else means we can't combine. */
830 return 0;
831 }
832 }
833
834 if (set == 0
835 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
836 so don't do anything with it. */
837 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
838 return 0;
839 }
840 else
841 return 0;
842
843 if (set == 0)
844 return 0;
845
846 set = expand_field_assignment (set);
847 src = SET_SRC (set), dest = SET_DEST (set);
848
849 /* Don't eliminate a store in the stack pointer. */
850 if (dest == stack_pointer_rtx
230d793d
RS
851 /* If we couldn't eliminate a field assignment, we can't combine. */
852 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
853 /* Don't combine with an insn that sets a register to itself if it has
854 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
5f4f0e22 855 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
230d793d
RS
856 /* Can't merge a function call. */
857 || GET_CODE (src) == CALL
858 /* Don't substitute into an incremented register. */
859 || FIND_REG_INC_NOTE (i3, dest)
860 || (succ && FIND_REG_INC_NOTE (succ, dest))
861 /* Don't combine the end of a libcall into anything. */
5f4f0e22 862 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
230d793d
RS
863 /* Make sure that DEST is not used after SUCC but before I3. */
864 || (succ && ! all_adjacent
865 && reg_used_between_p (dest, succ, i3))
866 /* Make sure that the value that is to be substituted for the register
867 does not use any registers whose values alter in between. However,
868 If the insns are adjacent, a use can't cross a set even though we
869 think it might (this can happen for a sequence of insns each setting
870 the same destination; reg_last_set of that register might point to
d81481d3
RK
871 a NOTE). If INSN has a REG_EQUIV note, the register is always
872 equivalent to the memory so the substitution is valid even if there
873 are intervening stores. Also, don't move a volatile asm or
874 UNSPEC_VOLATILE across any other insns. */
230d793d 875 || (! all_adjacent
d81481d3
RK
876 && (((GET_CODE (src) != MEM
877 || ! find_reg_note (insn, REG_EQUIV, src))
878 && use_crosses_set_p (src, INSN_CUID (insn)))
a66a10c7
RS
879 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
880 || GET_CODE (src) == UNSPEC_VOLATILE))
230d793d
RS
881 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
882 better register allocation by not doing the combine. */
883 || find_reg_note (i3, REG_NO_CONFLICT, dest)
884 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
885 /* Don't combine across a CALL_INSN, because that would possibly
886 change whether the life span of some REGs crosses calls or not,
887 and it is a pain to update that information.
888 Exception: if source is a constant, moving it later can't hurt.
889 Accept that special case, because it helps -fforce-addr a lot. */
890 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
891 return 0;
892
893 /* DEST must either be a REG or CC0. */
894 if (GET_CODE (dest) == REG)
895 {
896 /* If register alignment is being enforced for multi-word items in all
897 cases except for parameters, it is possible to have a register copy
898 insn referencing a hard register that is not allowed to contain the
899 mode being copied and which would not be valid as an operand of most
900 insns. Eliminate this problem by not combining with such an insn.
901
902 Also, on some machines we don't want to extend the life of a hard
903 register. */
904
905 if (GET_CODE (src) == REG
906 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
907 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
908#ifdef SMALL_REGISTER_CLASSES
909 /* Don't extend the life of a hard register. */
910 || REGNO (src) < FIRST_PSEUDO_REGISTER
911#else
912 || (REGNO (src) < FIRST_PSEUDO_REGISTER
913 && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
914#endif
915 ))
916 return 0;
917 }
918 else if (GET_CODE (dest) != CC0)
919 return 0;
920
5f96750d
RS
921 /* Don't substitute for a register intended as a clobberable operand.
922 Similarly, don't substitute an expression containing a register that
923 will be clobbered in I3. */
230d793d
RS
924 if (GET_CODE (PATTERN (i3)) == PARALLEL)
925 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
926 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
5f96750d
RS
927 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
928 src)
929 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
230d793d
RS
930 return 0;
931
932 /* If INSN contains anything volatile, or is an `asm' (whether volatile
933 or not), reject, unless nothing volatile comes between it and I3,
934 with the exception of SUCC. */
935
936 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
937 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
938 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
939 && p != succ && volatile_refs_p (PATTERN (p)))
940 return 0;
941
4b2cb4a2
RS
942 /* If there are any volatile insns between INSN and I3, reject, because
943 they might affect machine state. */
944
945 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
946 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
947 && p != succ && volatile_insn_p (PATTERN (p)))
948 return 0;
949
230d793d
RS
950 /* If INSN or I2 contains an autoincrement or autodecrement,
951 make sure that register is not used between there and I3,
952 and not already used in I3 either.
953 Also insist that I3 not be a jump; if it were one
954 and the incremented register were spilled, we would lose. */
955
956#ifdef AUTO_INC_DEC
957 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
958 if (REG_NOTE_KIND (link) == REG_INC
959 && (GET_CODE (i3) == JUMP_INSN
960 || reg_used_between_p (XEXP (link, 0), insn, i3)
961 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
962 return 0;
963#endif
964
965#ifdef HAVE_cc0
966 /* Don't combine an insn that follows a CC0-setting insn.
967 An insn that uses CC0 must not be separated from the one that sets it.
968 We do, however, allow I2 to follow a CC0-setting insn if that insn
969 is passed as I1; in that case it will be deleted also.
970 We also allow combining in this case if all the insns are adjacent
971 because that would leave the two CC0 insns adjacent as well.
972 It would be more logical to test whether CC0 occurs inside I1 or I2,
973 but that would be much slower, and this ought to be equivalent. */
974
975 p = prev_nonnote_insn (insn);
976 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
977 && ! all_adjacent)
978 return 0;
979#endif
980
981 /* If we get here, we have passed all the tests and the combination is
982 to be allowed. */
983
984 *pdest = dest;
985 *psrc = src;
986
987 return 1;
988}
989\f
990/* LOC is the location within I3 that contains its pattern or the component
991 of a PARALLEL of the pattern. We validate that it is valid for combining.
992
993 One problem is if I3 modifies its output, as opposed to replacing it
994 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
995 so would produce an insn that is not equivalent to the original insns.
996
997 Consider:
998
999 (set (reg:DI 101) (reg:DI 100))
1000 (set (subreg:SI (reg:DI 101) 0) <foo>)
1001
1002 This is NOT equivalent to:
1003
1004 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1005 (set (reg:DI 101) (reg:DI 100))])
1006
1007 Not only does this modify 100 (in which case it might still be valid
1008 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1009
1010 We can also run into a problem if I2 sets a register that I1
1011 uses and I1 gets directly substituted into I3 (not via I2). In that
1012 case, we would be getting the wrong value of I2DEST into I3, so we
1013 must reject the combination. This case occurs when I2 and I1 both
1014 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1015 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1016 of a SET must prevent combination from occurring.
1017
1018 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
1019 if the destination of a SET is a hard register.
1020
1021 Before doing the above check, we first try to expand a field assignment
1022 into a set of logical operations.
1023
1024 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1025 we place a register that is both set and used within I3. If more than one
1026 such register is detected, we fail.
1027
1028 Return 1 if the combination is valid, zero otherwise. */
1029
1030static int
1031combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1032 rtx i3;
1033 rtx *loc;
1034 rtx i2dest;
1035 rtx i1dest;
1036 int i1_not_in_src;
1037 rtx *pi3dest_killed;
1038{
1039 rtx x = *loc;
1040
1041 if (GET_CODE (x) == SET)
1042 {
1043 rtx set = expand_field_assignment (x);
1044 rtx dest = SET_DEST (set);
1045 rtx src = SET_SRC (set);
1046 rtx inner_dest = dest, inner_src = src;
1047
1048 SUBST (*loc, set);
1049
1050 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1051 || GET_CODE (inner_dest) == SUBREG
1052 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1053 inner_dest = XEXP (inner_dest, 0);
1054
1055 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1056 was added. */
1057#if 0
1058 while (GET_CODE (inner_src) == STRICT_LOW_PART
1059 || GET_CODE (inner_src) == SUBREG
1060 || GET_CODE (inner_src) == ZERO_EXTRACT)
1061 inner_src = XEXP (inner_src, 0);
1062
1063 /* If it is better that two different modes keep two different pseudos,
1064 avoid combining them. This avoids producing the following pattern
1065 on a 386:
1066 (set (subreg:SI (reg/v:QI 21) 0)
1067 (lshiftrt:SI (reg/v:SI 20)
1068 (const_int 24)))
1069 If that were made, reload could not handle the pair of
1070 reg 20/21, since it would try to get any GENERAL_REGS
1071 but some of them don't handle QImode. */
1072
1073 if (rtx_equal_p (inner_src, i2dest)
1074 && GET_CODE (inner_dest) == REG
1075 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1076 return 0;
1077#endif
1078
1079 /* Check for the case where I3 modifies its output, as
1080 discussed above. */
1081 if ((inner_dest != dest
1082 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1083 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
3f508eca
RK
1084 /* This is the same test done in can_combine_p except that we
1085 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
1086 CALL operation. */
230d793d 1087 || (GET_CODE (inner_dest) == REG
dfbe1b2f 1088 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
3f508eca
RK
1089#ifdef SMALL_REGISTER_CLASSES
1090 && GET_CODE (src) != CALL
1091#else
dfbe1b2f
RK
1092 && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1093 GET_MODE (inner_dest))
230d793d 1094#endif
dfbe1b2f
RK
1095 )
1096
230d793d
RS
1097 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1098 return 0;
1099
1100 /* If DEST is used in I3, it is being killed in this insn,
36a9c2e9
JL
1101 so record that for later.
1102 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1103 STACK_POINTER_REGNUM, since these are always considered to be
1104 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
230d793d 1105 if (pi3dest_killed && GET_CODE (dest) == REG
36a9c2e9
JL
1106 && reg_referenced_p (dest, PATTERN (i3))
1107 && REGNO (dest) != FRAME_POINTER_REGNUM
6d7096b0
DE
1108#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1109 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1110#endif
36a9c2e9
JL
1111#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1112 && (REGNO (dest) != ARG_POINTER_REGNUM
1113 || ! fixed_regs [REGNO (dest)])
1114#endif
1115 && REGNO (dest) != STACK_POINTER_REGNUM)
230d793d
RS
1116 {
1117 if (*pi3dest_killed)
1118 return 0;
1119
1120 *pi3dest_killed = dest;
1121 }
1122 }
1123
1124 else if (GET_CODE (x) == PARALLEL)
1125 {
1126 int i;
1127
1128 for (i = 0; i < XVECLEN (x, 0); i++)
1129 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1130 i1_not_in_src, pi3dest_killed))
1131 return 0;
1132 }
1133
1134 return 1;
1135}
1136\f
1137/* Try to combine the insns I1 and I2 into I3.
1138 Here I1 and I2 appear earlier than I3.
1139 I1 can be zero; then we combine just I2 into I3.
1140
1141 It we are combining three insns and the resulting insn is not recognized,
1142 try splitting it into two insns. If that happens, I2 and I3 are retained
1143 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1144 are pseudo-deleted.
1145
abe6e52f
RK
1146 Return 0 if the combination does not work. Then nothing is changed.
1147 If we did the combination, return the insn at which combine should
1148 resume scanning. */
230d793d
RS
1149
1150static rtx
1151try_combine (i3, i2, i1)
1152 register rtx i3, i2, i1;
1153{
1154 /* New patterns for I3 and I3, respectively. */
1155 rtx newpat, newi2pat = 0;
1156 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1157 int added_sets_1, added_sets_2;
1158 /* Total number of SETs to put into I3. */
1159 int total_sets;
1160 /* Nonzero is I2's body now appears in I3. */
1161 int i2_is_used;
1162 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1163 int insn_code_number, i2_code_number, other_code_number;
1164 /* Contains I3 if the destination of I3 is used in its source, which means
1165 that the old life of I3 is being killed. If that usage is placed into
1166 I2 and not in I3, a REG_DEAD note must be made. */
1167 rtx i3dest_killed = 0;
1168 /* SET_DEST and SET_SRC of I2 and I1. */
1169 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1170 /* PATTERN (I2), or a copy of it in certain cases. */
1171 rtx i2pat;
1172 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
c4e861e8 1173 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
230d793d
RS
1174 int i1_feeds_i3 = 0;
1175 /* Notes that must be added to REG_NOTES in I3 and I2. */
1176 rtx new_i3_notes, new_i2_notes;
176c9e6b
JW
1177 /* Notes that we substituted I3 into I2 instead of the normal case. */
1178 int i3_subst_into_i2 = 0;
230d793d
RS
1179
1180 int maxreg;
1181 rtx temp;
1182 register rtx link;
1183 int i;
1184
1185 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1186 This can occur when flow deletes an insn that it has merged into an
1187 auto-increment address. We also can't do anything if I3 has a
1188 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1189 libcall. */
1190
1191 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1192 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1193 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
5f4f0e22 1194 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
230d793d
RS
1195 return 0;
1196
1197 combine_attempts++;
1198
1199 undobuf.num_undo = previous_num_undos = 0;
1200 undobuf.other_insn = 0;
1201
1202 /* Save the current high-water-mark so we can free storage if we didn't
1203 accept this combination. */
1204 undobuf.storage = (char *) oballoc (0);
1205
1206 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1207 code below, set I1 to be the earlier of the two insns. */
1208 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1209 temp = i1, i1 = i2, i2 = temp;
1210
137e889e 1211 subst_prev_insn = 0;
abe6e52f 1212 added_links_insn = 0;
137e889e 1213
230d793d
RS
1214 /* First check for one important special-case that the code below will
1215 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1216 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1217 we may be able to replace that destination with the destination of I3.
1218 This occurs in the common code where we compute both a quotient and
1219 remainder into a structure, in which case we want to do the computation
1220 directly into the structure to avoid register-register copies.
1221
1222 We make very conservative checks below and only try to handle the
1223 most common cases of this. For example, we only handle the case
1224 where I2 and I3 are adjacent to avoid making difficult register
1225 usage tests. */
1226
1227 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1228 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1229 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1230#ifdef SMALL_REGISTER_CLASSES
1231 && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1232 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
1233#endif
1234 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1235 && GET_CODE (PATTERN (i2)) == PARALLEL
1236 && ! side_effects_p (SET_DEST (PATTERN (i3)))
5089e22e
RS
1237 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1238 below would need to check what is inside (and reg_overlap_mentioned_p
1239 doesn't support those codes anyway). Don't allow those destinations;
1240 the resulting insn isn't likely to be recognized anyway. */
1241 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1242 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
230d793d
RS
1243 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1244 SET_DEST (PATTERN (i3)))
1245 && next_real_insn (i2) == i3)
5089e22e
RS
1246 {
1247 rtx p2 = PATTERN (i2);
1248
1249 /* Make sure that the destination of I3,
1250 which we are going to substitute into one output of I2,
1251 is not used within another output of I2. We must avoid making this:
1252 (parallel [(set (mem (reg 69)) ...)
1253 (set (reg 69) ...)])
1254 which is not well-defined as to order of actions.
1255 (Besides, reload can't handle output reloads for this.)
1256
1257 The problem can also happen if the dest of I3 is a memory ref,
1258 if another dest in I2 is an indirect memory ref. */
1259 for (i = 0; i < XVECLEN (p2, 0); i++)
1260 if (GET_CODE (XVECEXP (p2, 0, i)) == SET
1261 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1262 SET_DEST (XVECEXP (p2, 0, i))))
1263 break;
230d793d 1264
5089e22e
RS
1265 if (i == XVECLEN (p2, 0))
1266 for (i = 0; i < XVECLEN (p2, 0); i++)
1267 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1268 {
1269 combine_merges++;
230d793d 1270
5089e22e
RS
1271 subst_insn = i3;
1272 subst_low_cuid = INSN_CUID (i2);
230d793d 1273
c4e861e8 1274 added_sets_2 = added_sets_1 = 0;
5089e22e 1275 i2dest = SET_SRC (PATTERN (i3));
230d793d 1276
5089e22e
RS
1277 /* Replace the dest in I2 with our dest and make the resulting
1278 insn the new pattern for I3. Then skip to where we
1279 validate the pattern. Everything was set up above. */
1280 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1281 SET_DEST (PATTERN (i3)));
1282
1283 newpat = p2;
176c9e6b 1284 i3_subst_into_i2 = 1;
5089e22e
RS
1285 goto validate_replacement;
1286 }
1287 }
230d793d
RS
1288
1289#ifndef HAVE_cc0
1290 /* If we have no I1 and I2 looks like:
1291 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1292 (set Y OP)])
1293 make up a dummy I1 that is
1294 (set Y OP)
1295 and change I2 to be
1296 (set (reg:CC X) (compare:CC Y (const_int 0)))
1297
1298 (We can ignore any trailing CLOBBERs.)
1299
1300 This undoes a previous combination and allows us to match a branch-and-
1301 decrement insn. */
1302
1303 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1304 && XVECLEN (PATTERN (i2), 0) >= 2
1305 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1306 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1307 == MODE_CC)
1308 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1309 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1310 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1311 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1312 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1313 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1314 {
1315 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1316 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1317 break;
1318
1319 if (i == 1)
1320 {
1321 /* We make I1 with the same INSN_UID as I2. This gives it
1322 the same INSN_CUID for value tracking. Our fake I1 will
1323 never appear in the insn stream so giving it the same INSN_UID
1324 as I2 will not cause a problem. */
1325
137e889e
RK
1326 subst_prev_insn = i1
1327 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1328 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
230d793d
RS
1329
1330 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1331 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1332 SET_DEST (PATTERN (i1)));
1333 }
1334 }
1335#endif
1336
1337 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1338 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1339 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1340 {
1341 undo_all ();
1342 return 0;
1343 }
1344
1345 /* Record whether I2DEST is used in I2SRC and similarly for the other
1346 cases. Knowing this will help in register status updating below. */
1347 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1348 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1349 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1350
916f14f1 1351 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1352 in I2SRC. */
1353 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1354
1355 /* Ensure that I3's pattern can be the destination of combines. */
1356 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1357 i1 && i2dest_in_i1src && i1_feeds_i3,
1358 &i3dest_killed))
1359 {
1360 undo_all ();
1361 return 0;
1362 }
1363
1364 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1365 We used to do this EXCEPT in one case: I3 has a post-inc in an
1366 output operand. However, that exception can give rise to insns like
1367 mov r3,(r3)+
1368 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1369 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1370
1371#if 0
1372 if (!(GET_CODE (PATTERN (i3)) == SET
1373 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1374 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1375 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1376 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1377 /* It's not the exception. */
1378#endif
1379#ifdef AUTO_INC_DEC
1380 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1381 if (REG_NOTE_KIND (link) == REG_INC
1382 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1383 || (i1 != 0
1384 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1385 {
1386 undo_all ();
1387 return 0;
1388 }
1389#endif
1390
1391 /* See if the SETs in I1 or I2 need to be kept around in the merged
1392 instruction: whenever the value set there is still needed past I3.
1393 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1394
1395 For the SET in I1, we have two cases: If I1 and I2 independently
1396 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1397 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1398 in I1 needs to be kept around unless I1DEST dies or is set in either
1399 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1400 I1DEST. If so, we know I1 feeds into I2. */
1401
1402 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1403
1404 added_sets_1
1405 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1406 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1407
1408 /* If the set in I2 needs to be kept around, we must make a copy of
1409 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
5089e22e 1410 PATTERN (I2), we are only substituting for the original I1DEST, not into
230d793d
RS
1411 an already-substituted copy. This also prevents making self-referential
1412 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1413 I2DEST. */
1414
1415 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1416 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1417 : PATTERN (i2));
1418
1419 if (added_sets_2)
1420 i2pat = copy_rtx (i2pat);
1421
1422 combine_merges++;
1423
1424 /* Substitute in the latest insn for the regs set by the earlier ones. */
1425
1426 maxreg = max_reg_num ();
1427
1428 subst_insn = i3;
230d793d
RS
1429
1430 /* It is possible that the source of I2 or I1 may be performing an
1431 unneeded operation, such as a ZERO_EXTEND of something that is known
1432 to have the high part zero. Handle that case by letting subst look at
1433 the innermost one of them.
1434
1435 Another way to do this would be to have a function that tries to
1436 simplify a single insn instead of merging two or more insns. We don't
1437 do this because of the potential of infinite loops and because
1438 of the potential extra memory required. However, doing it the way
1439 we are is a bit of a kludge and doesn't catch all cases.
1440
1441 But only do this if -fexpensive-optimizations since it slows things down
1442 and doesn't usually win. */
1443
1444 if (flag_expensive_optimizations)
1445 {
1446 /* Pass pc_rtx so no substitutions are done, just simplifications.
1447 The cases that we are interested in here do not involve the few
1448 cases were is_replaced is checked. */
1449 if (i1)
d0ab8cd3
RK
1450 {
1451 subst_low_cuid = INSN_CUID (i1);
1452 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1453 }
230d793d 1454 else
d0ab8cd3
RK
1455 {
1456 subst_low_cuid = INSN_CUID (i2);
1457 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1458 }
230d793d
RS
1459
1460 previous_num_undos = undobuf.num_undo;
1461 }
1462
1463#ifndef HAVE_cc0
1464 /* Many machines that don't use CC0 have insns that can both perform an
1465 arithmetic operation and set the condition code. These operations will
1466 be represented as a PARALLEL with the first element of the vector
1467 being a COMPARE of an arithmetic operation with the constant zero.
1468 The second element of the vector will set some pseudo to the result
1469 of the same arithmetic operation. If we simplify the COMPARE, we won't
1470 match such a pattern and so will generate an extra insn. Here we test
1471 for this case, where both the comparison and the operation result are
1472 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1473 I2SRC. Later we will make the PARALLEL that contains I2. */
1474
1475 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1476 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1477 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1478 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1479 {
1480 rtx *cc_use;
1481 enum machine_mode compare_mode;
1482
1483 newpat = PATTERN (i3);
1484 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1485
1486 i2_is_used = 1;
1487
1488#ifdef EXTRA_CC_MODES
1489 /* See if a COMPARE with the operand we substituted in should be done
1490 with the mode that is currently being used. If not, do the same
1491 processing we do in `subst' for a SET; namely, if the destination
1492 is used only once, try to replace it with a register of the proper
1493 mode and also replace the COMPARE. */
1494 if (undobuf.other_insn == 0
1495 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1496 &undobuf.other_insn))
77fa0940
RK
1497 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1498 i2src, const0_rtx))
230d793d
RS
1499 != GET_MODE (SET_DEST (newpat))))
1500 {
1501 int regno = REGNO (SET_DEST (newpat));
1502 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1503
1504 if (regno < FIRST_PSEUDO_REGISTER
1505 || (reg_n_sets[regno] == 1 && ! added_sets_2
1506 && ! REG_USERVAR_P (SET_DEST (newpat))))
1507 {
1508 if (regno >= FIRST_PSEUDO_REGISTER)
1509 SUBST (regno_reg_rtx[regno], new_dest);
1510
1511 SUBST (SET_DEST (newpat), new_dest);
1512 SUBST (XEXP (*cc_use, 0), new_dest);
1513 SUBST (SET_SRC (newpat),
1514 gen_rtx_combine (COMPARE, compare_mode,
1515 i2src, const0_rtx));
1516 }
1517 else
1518 undobuf.other_insn = 0;
1519 }
1520#endif
1521 }
1522 else
1523#endif
1524 {
1525 n_occurrences = 0; /* `subst' counts here */
1526
1527 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1528 need to make a unique copy of I2SRC each time we substitute it
1529 to avoid self-referential rtl. */
1530
d0ab8cd3 1531 subst_low_cuid = INSN_CUID (i2);
230d793d
RS
1532 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1533 ! i1_feeds_i3 && i1dest_in_i1src);
1534 previous_num_undos = undobuf.num_undo;
1535
1536 /* Record whether i2's body now appears within i3's body. */
1537 i2_is_used = n_occurrences;
1538 }
1539
1540 /* If we already got a failure, don't try to do more. Otherwise,
1541 try to substitute in I1 if we have it. */
1542
1543 if (i1 && GET_CODE (newpat) != CLOBBER)
1544 {
1545 /* Before we can do this substitution, we must redo the test done
1546 above (see detailed comments there) that ensures that I1DEST
1547 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1548
5f4f0e22
CH
1549 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1550 0, NULL_PTR))
230d793d
RS
1551 {
1552 undo_all ();
1553 return 0;
1554 }
1555
1556 n_occurrences = 0;
d0ab8cd3 1557 subst_low_cuid = INSN_CUID (i1);
230d793d
RS
1558 newpat = subst (newpat, i1dest, i1src, 0, 0);
1559 previous_num_undos = undobuf.num_undo;
1560 }
1561
916f14f1
RK
1562 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1563 to count all the ways that I2SRC and I1SRC can be used. */
5f4f0e22 1564 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1565 && i2_is_used + added_sets_2 > 1)
5f4f0e22 1566 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
916f14f1
RK
1567 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1568 > 1))
230d793d
RS
1569 /* Fail if we tried to make a new register (we used to abort, but there's
1570 really no reason to). */
1571 || max_reg_num () != maxreg
1572 /* Fail if we couldn't do something and have a CLOBBER. */
1573 || GET_CODE (newpat) == CLOBBER)
1574 {
1575 undo_all ();
1576 return 0;
1577 }
1578
1579 /* If the actions of the earlier insns must be kept
1580 in addition to substituting them into the latest one,
1581 we must make a new PARALLEL for the latest insn
1582 to hold additional the SETs. */
1583
1584 if (added_sets_1 || added_sets_2)
1585 {
1586 combine_extras++;
1587
1588 if (GET_CODE (newpat) == PARALLEL)
1589 {
1590 rtvec old = XVEC (newpat, 0);
1591 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1592 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1593 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1594 sizeof (old->elem[0]) * old->num_elem);
1595 }
1596 else
1597 {
1598 rtx old = newpat;
1599 total_sets = 1 + added_sets_1 + added_sets_2;
1600 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1601 XVECEXP (newpat, 0, 0) = old;
1602 }
1603
1604 if (added_sets_1)
1605 XVECEXP (newpat, 0, --total_sets)
1606 = (GET_CODE (PATTERN (i1)) == PARALLEL
1607 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1608
1609 if (added_sets_2)
1610 {
1611 /* If there is no I1, use I2's body as is. We used to also not do
1612 the subst call below if I2 was substituted into I3,
1613 but that could lose a simplification. */
1614 if (i1 == 0)
1615 XVECEXP (newpat, 0, --total_sets) = i2pat;
1616 else
1617 /* See comment where i2pat is assigned. */
1618 XVECEXP (newpat, 0, --total_sets)
1619 = subst (i2pat, i1dest, i1src, 0, 0);
1620 }
1621 }
1622
1623 /* We come here when we are replacing a destination in I2 with the
1624 destination of I3. */
1625 validate_replacement:
1626
1627 /* Is the result of combination a valid instruction? */
1628 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1629
1630 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1631 the second SET's destination is a register that is unused. In that case,
1632 we just need the first SET. This can occur when simplifying a divmod
1633 insn. We *must* test for this case here because the code below that
1634 splits two independent SETs doesn't handle this case correctly when it
1635 updates the register status. Also check the case where the first
1636 SET's destination is unused. That would not cause incorrect code, but
1637 does cause an unneeded insn to remain. */
1638
1639 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1640 && XVECLEN (newpat, 0) == 2
1641 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1642 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1643 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1644 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1645 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1646 && asm_noperands (newpat) < 0)
1647 {
1648 newpat = XVECEXP (newpat, 0, 0);
1649 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1650 }
1651
1652 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1653 && XVECLEN (newpat, 0) == 2
1654 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1655 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1656 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1657 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1658 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1659 && asm_noperands (newpat) < 0)
1660 {
1661 newpat = XVECEXP (newpat, 0, 1);
1662 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1663 }
1664
d0ab8cd3
RK
1665 /* See if this is an XOR. If so, perhaps the problem is that the
1666 constant is out of range. Replace it with a complemented XOR with
1667 a complemented constant; it might be in range. */
1668
1669 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1670 && GET_CODE (SET_SRC (newpat)) == XOR
1671 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1672 && ((temp = simplify_unary_operation (NOT,
1673 GET_MODE (SET_SRC (newpat)),
1674 XEXP (SET_SRC (newpat), 1),
1675 GET_MODE (SET_SRC (newpat))))
1676 != 0))
1677 {
1678 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1679 rtx pat
1680 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1681 gen_unary (NOT, i_mode,
1682 gen_binary (XOR, i_mode,
1683 XEXP (SET_SRC (newpat), 0),
1684 temp)));
1685
1686 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1687 if (insn_code_number >= 0)
1688 newpat = pat;
1689 }
1690
230d793d
RS
1691 /* If we were combining three insns and the result is a simple SET
1692 with no ASM_OPERANDS that wasn't recognized, try to split it into two
916f14f1
RK
1693 insns. There are two ways to do this. It can be split using a
1694 machine-specific method (like when you have an addition of a large
1695 constant) or by combine in the function find_split_point. */
1696
230d793d
RS
1697 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1698 && asm_noperands (newpat) < 0)
1699 {
916f14f1 1700 rtx m_split, *split;
42495ca0 1701 rtx ni2dest = i2dest;
916f14f1
RK
1702
1703 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
1704 use I2DEST as a scratch register will help. In the latter case,
1705 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1
RK
1706
1707 m_split = split_insns (newpat, i3);
a70c61d9
JW
1708
1709 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1710 inputs of NEWPAT. */
1711
1712 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1713 possible to try that as a scratch reg. This would require adding
1714 more code to make it work though. */
1715
1716 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
42495ca0
RK
1717 {
1718 /* If I2DEST is a hard register or the only use of a pseudo,
1719 we can change its mode. */
1720 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 1721 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 1722 && GET_CODE (i2dest) == REG
42495ca0
RK
1723 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1724 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1725 && ! REG_USERVAR_P (i2dest))))
1726 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1727 REGNO (i2dest));
1728
1729 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1730 gen_rtvec (2, newpat,
1731 gen_rtx (CLOBBER,
1732 VOIDmode,
1733 ni2dest))),
1734 i3);
1735 }
916f14f1
RK
1736
1737 if (m_split && GET_CODE (m_split) == SEQUENCE
3f508eca
RK
1738 && XVECLEN (m_split, 0) == 2
1739 && (next_real_insn (i2) == i3
1740 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1741 INSN_CUID (i2))))
916f14f1 1742 {
1a26b032 1743 rtx i2set, i3set;
d0ab8cd3 1744 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 1745 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 1746
e4ba89be
RK
1747 i3set = single_set (XVECEXP (m_split, 0, 1));
1748 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 1749
42495ca0
RK
1750 /* In case we changed the mode of I2DEST, replace it in the
1751 pseudo-register table here. We can't do it above in case this
1752 code doesn't get executed and we do a split the other way. */
1753
1754 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1755 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1756
916f14f1 1757 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1a26b032
RK
1758
1759 /* If I2 or I3 has multiple SETs, we won't know how to track
1760 register status, so don't use these insns. */
1761
1762 if (i2_code_number >= 0 && i2set && i3set)
8888fada
RK
1763 insn_code_number = recog_for_combine (&newi3pat, i3,
1764 &new_i3_notes);
c767f54b 1765
d0ab8cd3
RK
1766 if (insn_code_number >= 0)
1767 newpat = newi3pat;
1768
c767f54b 1769 /* It is possible that both insns now set the destination of I3.
22609cbf 1770 If so, we must show an extra use of it. */
c767f54b 1771
1a26b032
RK
1772 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1773 && GET_CODE (SET_DEST (i2set)) == REG
1774 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
22609cbf 1775 reg_n_sets[REGNO (SET_DEST (i2set))]++;
916f14f1 1776 }
230d793d
RS
1777
1778 /* If we can split it and use I2DEST, go ahead and see if that
1779 helps things be recognized. Verify that none of the registers
1780 are set between I2 and I3. */
d0ab8cd3 1781 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d
RS
1782#ifdef HAVE_cc0
1783 && GET_CODE (i2dest) == REG
1784#endif
1785 /* We need I2DEST in the proper mode. If it is a hard register
1786 or the only use of a pseudo, we can change its mode. */
1787 && (GET_MODE (*split) == GET_MODE (i2dest)
1788 || GET_MODE (*split) == VOIDmode
1789 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1790 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1791 && ! REG_USERVAR_P (i2dest)))
1792 && (next_real_insn (i2) == i3
1793 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1794 /* We can't overwrite I2DEST if its value is still used by
1795 NEWPAT. */
1796 && ! reg_referenced_p (i2dest, newpat))
1797 {
1798 rtx newdest = i2dest;
1799
1800 /* Get NEWDEST as a register in the proper mode. We have already
1801 validated that we can do this. */
1802 if (GET_MODE (i2dest) != GET_MODE (*split)
1803 && GET_MODE (*split) != VOIDmode)
1804 {
1805 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1806
1807 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1808 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1809 }
1810
1811 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1812 an ASHIFT. This can occur if it was inside a PLUS and hence
1813 appeared to be a memory address. This is a kludge. */
1814 if (GET_CODE (*split) == MULT
1815 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1816 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1817 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
5f4f0e22 1818 XEXP (*split, 0), GEN_INT (i)));
230d793d
RS
1819
1820#ifdef INSN_SCHEDULING
1821 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1822 be written as a ZERO_EXTEND. */
1823 if (GET_CODE (*split) == SUBREG
1824 && GET_CODE (SUBREG_REG (*split)) == MEM)
1825 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1826 XEXP (*split, 0)));
1827#endif
1828
1829 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1830 SUBST (*split, newdest);
1831 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1832 if (i2_code_number >= 0)
1833 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1834 }
1835 }
1836
1837 /* Check for a case where we loaded from memory in a narrow mode and
1838 then sign extended it, but we need both registers. In that case,
1839 we have a PARALLEL with both loads from the same memory location.
1840 We can split this into a load from memory followed by a register-register
1841 copy. This saves at least one insn, more if register allocation can
f0343c74
RK
1842 eliminate the copy.
1843
1844 We cannot do this if the destination of the second assignment is
1845 a register that we have already assumed is zero-extended. Similarly
1846 for a SUBREG of such a register. */
230d793d
RS
1847
1848 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1849 && GET_CODE (newpat) == PARALLEL
1850 && XVECLEN (newpat, 0) == 2
1851 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1852 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1853 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1854 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1855 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1856 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1857 INSN_CUID (i2))
1858 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1859 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
f0343c74
RK
1860 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
1861 (GET_CODE (temp) == REG
1862 && reg_nonzero_bits[REGNO (temp)] != 0
1863 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1864 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1865 && (reg_nonzero_bits[REGNO (temp)]
1866 != GET_MODE_MASK (word_mode))))
1867 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
1868 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
1869 (GET_CODE (temp) == REG
1870 && reg_nonzero_bits[REGNO (temp)] != 0
1871 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1872 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1873 && (reg_nonzero_bits[REGNO (temp)]
1874 != GET_MODE_MASK (word_mode)))))
230d793d
RS
1875 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1876 SET_SRC (XVECEXP (newpat, 0, 1)))
1877 && ! find_reg_note (i3, REG_UNUSED,
1878 SET_DEST (XVECEXP (newpat, 0, 0))))
1879 {
472fbdd1
RK
1880 rtx ni2dest;
1881
230d793d 1882 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 1883 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
1884 newpat = XVECEXP (newpat, 0, 1);
1885 SUBST (SET_SRC (newpat),
472fbdd1 1886 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
230d793d
RS
1887 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1888 if (i2_code_number >= 0)
1889 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
5089e22e
RS
1890
1891 if (insn_code_number >= 0)
1892 {
1893 rtx insn;
1894 rtx link;
1895
1896 /* If we will be able to accept this, we have made a change to the
1897 destination of I3. This can invalidate a LOG_LINKS pointing
1898 to I3. No other part of combine.c makes such a transformation.
1899
1900 The new I3 will have a destination that was previously the
1901 destination of I1 or I2 and which was used in i2 or I3. Call
1902 distribute_links to make a LOG_LINK from the next use of
1903 that destination. */
1904
1905 PATTERN (i3) = newpat;
5f4f0e22 1906 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
5089e22e
RS
1907
1908 /* I3 now uses what used to be its destination and which is
1909 now I2's destination. That means we need a LOG_LINK from
1910 I3 to I2. But we used to have one, so we still will.
1911
1912 However, some later insn might be using I2's dest and have
1913 a LOG_LINK pointing at I3. We must remove this link.
1914 The simplest way to remove the link is to point it at I1,
1915 which we know will be a NOTE. */
1916
1917 for (insn = NEXT_INSN (i3);
0d4d42c3
RK
1918 insn && (this_basic_block == n_basic_blocks - 1
1919 || insn != basic_block_head[this_basic_block + 1]);
5089e22e
RS
1920 insn = NEXT_INSN (insn))
1921 {
1922 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
472fbdd1 1923 && reg_referenced_p (ni2dest, PATTERN (insn)))
5089e22e
RS
1924 {
1925 for (link = LOG_LINKS (insn); link;
1926 link = XEXP (link, 1))
1927 if (XEXP (link, 0) == i3)
1928 XEXP (link, 0) = i1;
1929
1930 break;
1931 }
1932 }
1933 }
230d793d
RS
1934 }
1935
1936 /* Similarly, check for a case where we have a PARALLEL of two independent
1937 SETs but we started with three insns. In this case, we can do the sets
1938 as two separate insns. This case occurs when some SET allows two
1939 other insns to combine, but the destination of that SET is still live. */
1940
1941 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1942 && GET_CODE (newpat) == PARALLEL
1943 && XVECLEN (newpat, 0) == 2
1944 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1945 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1946 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1947 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1948 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1949 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1950 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1951 INSN_CUID (i2))
1952 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1953 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1954 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1955 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1956 XVECEXP (newpat, 0, 0))
1957 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1958 XVECEXP (newpat, 0, 1)))
1959 {
1960 newi2pat = XVECEXP (newpat, 0, 1);
1961 newpat = XVECEXP (newpat, 0, 0);
1962
1963 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1964 if (i2_code_number >= 0)
1965 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1966 }
1967
1968 /* If it still isn't recognized, fail and change things back the way they
1969 were. */
1970 if ((insn_code_number < 0
1971 /* Is the result a reasonable ASM_OPERANDS? */
1972 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1973 {
1974 undo_all ();
1975 return 0;
1976 }
1977
1978 /* If we had to change another insn, make sure it is valid also. */
1979 if (undobuf.other_insn)
1980 {
1981 rtx other_notes = REG_NOTES (undobuf.other_insn);
1982 rtx other_pat = PATTERN (undobuf.other_insn);
1983 rtx new_other_notes;
1984 rtx note, next;
1985
1986 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1987 &new_other_notes);
1988
1989 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1990 {
1991 undo_all ();
1992 return 0;
1993 }
1994
1995 PATTERN (undobuf.other_insn) = other_pat;
1996
1997 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1998 are still valid. Then add any non-duplicate notes added by
1999 recog_for_combine. */
2000 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2001 {
2002 next = XEXP (note, 1);
2003
2004 if (REG_NOTE_KIND (note) == REG_UNUSED
2005 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1a26b032
RK
2006 {
2007 if (GET_CODE (XEXP (note, 0)) == REG)
2008 reg_n_deaths[REGNO (XEXP (note, 0))]--;
2009
2010 remove_note (undobuf.other_insn, note);
2011 }
230d793d
RS
2012 }
2013
1a26b032
RK
2014 for (note = new_other_notes; note; note = XEXP (note, 1))
2015 if (GET_CODE (XEXP (note, 0)) == REG)
2016 reg_n_deaths[REGNO (XEXP (note, 0))]++;
2017
230d793d 2018 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 2019 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
2020 }
2021
2022 /* We now know that we can do this combination. Merge the insns and
2023 update the status of registers and LOG_LINKS. */
2024
2025 {
2026 rtx i3notes, i2notes, i1notes = 0;
2027 rtx i3links, i2links, i1links = 0;
2028 rtx midnotes = 0;
2029 int all_adjacent = (next_real_insn (i2) == i3
2030 && (i1 == 0 || next_real_insn (i1) == i2));
2031 register int regno;
2032 /* Compute which registers we expect to eliminate. */
2033 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
2034 ? 0 : i2dest);
2035 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
2036
2037 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2038 clear them. */
2039 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2040 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2041 if (i1)
2042 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2043
2044 /* Ensure that we do not have something that should not be shared but
2045 occurs multiple times in the new insns. Check this by first
5089e22e 2046 resetting all the `used' flags and then copying anything is shared. */
230d793d
RS
2047
2048 reset_used_flags (i3notes);
2049 reset_used_flags (i2notes);
2050 reset_used_flags (i1notes);
2051 reset_used_flags (newpat);
2052 reset_used_flags (newi2pat);
2053 if (undobuf.other_insn)
2054 reset_used_flags (PATTERN (undobuf.other_insn));
2055
2056 i3notes = copy_rtx_if_shared (i3notes);
2057 i2notes = copy_rtx_if_shared (i2notes);
2058 i1notes = copy_rtx_if_shared (i1notes);
2059 newpat = copy_rtx_if_shared (newpat);
2060 newi2pat = copy_rtx_if_shared (newi2pat);
2061 if (undobuf.other_insn)
2062 reset_used_flags (PATTERN (undobuf.other_insn));
2063
2064 INSN_CODE (i3) = insn_code_number;
2065 PATTERN (i3) = newpat;
2066 if (undobuf.other_insn)
2067 INSN_CODE (undobuf.other_insn) = other_code_number;
2068
2069 /* We had one special case above where I2 had more than one set and
2070 we replaced a destination of one of those sets with the destination
2071 of I3. In that case, we have to update LOG_LINKS of insns later
176c9e6b
JW
2072 in this basic block. Note that this (expensive) case is rare.
2073
2074 Also, in this case, we must pretend that all REG_NOTEs for I2
2075 actually came from I3, so that REG_UNUSED notes from I2 will be
2076 properly handled. */
2077
2078 if (i3_subst_into_i2)
2079 {
2080 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2081 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2082 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2083 && ! find_reg_note (i2, REG_UNUSED,
2084 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2085 for (temp = NEXT_INSN (i2);
2086 temp && (this_basic_block == n_basic_blocks - 1
2087 || basic_block_head[this_basic_block] != temp);
2088 temp = NEXT_INSN (temp))
2089 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2090 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2091 if (XEXP (link, 0) == i2)
2092 XEXP (link, 0) = i3;
2093
2094 if (i3notes)
2095 {
2096 rtx link = i3notes;
2097 while (XEXP (link, 1))
2098 link = XEXP (link, 1);
2099 XEXP (link, 1) = i2notes;
2100 }
2101 else
2102 i3notes = i2notes;
2103 i2notes = 0;
2104 }
230d793d
RS
2105
2106 LOG_LINKS (i3) = 0;
2107 REG_NOTES (i3) = 0;
2108 LOG_LINKS (i2) = 0;
2109 REG_NOTES (i2) = 0;
2110
2111 if (newi2pat)
2112 {
2113 INSN_CODE (i2) = i2_code_number;
2114 PATTERN (i2) = newi2pat;
2115 }
2116 else
2117 {
2118 PUT_CODE (i2, NOTE);
2119 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2120 NOTE_SOURCE_FILE (i2) = 0;
2121 }
2122
2123 if (i1)
2124 {
2125 LOG_LINKS (i1) = 0;
2126 REG_NOTES (i1) = 0;
2127 PUT_CODE (i1, NOTE);
2128 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2129 NOTE_SOURCE_FILE (i1) = 0;
2130 }
2131
2132 /* Get death notes for everything that is now used in either I3 or
2133 I2 and used to die in a previous insn. */
2134
2135 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2136 if (newi2pat)
2137 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2138
2139 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2140 if (i3notes)
5f4f0e22
CH
2141 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2142 elim_i2, elim_i1);
230d793d 2143 if (i2notes)
5f4f0e22
CH
2144 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2145 elim_i2, elim_i1);
230d793d 2146 if (i1notes)
5f4f0e22
CH
2147 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2148 elim_i2, elim_i1);
230d793d 2149 if (midnotes)
5f4f0e22
CH
2150 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2151 elim_i2, elim_i1);
230d793d
RS
2152
2153 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2154 know these are REG_UNUSED and want them to go to the desired insn,
1a26b032
RK
2155 so we always pass it as i3. We have not counted the notes in
2156 reg_n_deaths yet, so we need to do so now. */
2157
230d793d 2158 if (newi2pat && new_i2_notes)
1a26b032
RK
2159 {
2160 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2161 if (GET_CODE (XEXP (temp, 0)) == REG)
2162 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2163
2164 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2165 }
2166
230d793d 2167 if (new_i3_notes)
1a26b032
RK
2168 {
2169 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2170 if (GET_CODE (XEXP (temp, 0)) == REG)
2171 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2172
2173 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2174 }
230d793d
RS
2175
2176 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1a26b032
RK
2177 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2178 Show an additional death due to the REG_DEAD note we make here. If
2179 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 2180
230d793d 2181 if (i3dest_killed)
1a26b032
RK
2182 {
2183 if (GET_CODE (i3dest_killed) == REG)
2184 reg_n_deaths[REGNO (i3dest_killed)]++;
2185
2186 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2187 NULL_RTX),
2188 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2189 NULL_RTX, NULL_RTX);
2190 }
58c8c593
RK
2191
2192 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2193 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2194 we passed I3 in that case, it might delete I2. */
2195
230d793d 2196 if (i2dest_in_i2src)
58c8c593 2197 {
1a26b032
RK
2198 if (GET_CODE (i2dest) == REG)
2199 reg_n_deaths[REGNO (i2dest)]++;
2200
58c8c593
RK
2201 if (newi2pat && reg_set_p (i2dest, newi2pat))
2202 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2203 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2204 else
2205 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2206 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2207 NULL_RTX, NULL_RTX);
2208 }
2209
230d793d 2210 if (i1dest_in_i1src)
58c8c593 2211 {
1a26b032
RK
2212 if (GET_CODE (i1dest) == REG)
2213 reg_n_deaths[REGNO (i1dest)]++;
2214
58c8c593
RK
2215 if (newi2pat && reg_set_p (i1dest, newi2pat))
2216 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2217 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2218 else
2219 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2220 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2221 NULL_RTX, NULL_RTX);
2222 }
230d793d
RS
2223
2224 distribute_links (i3links);
2225 distribute_links (i2links);
2226 distribute_links (i1links);
2227
2228 if (GET_CODE (i2dest) == REG)
2229 {
d0ab8cd3
RK
2230 rtx link;
2231 rtx i2_insn = 0, i2_val = 0, set;
2232
2233 /* The insn that used to set this register doesn't exist, and
2234 this life of the register may not exist either. See if one of
2235 I3's links points to an insn that sets I2DEST. If it does,
2236 that is now the last known value for I2DEST. If we don't update
2237 this and I2 set the register to a value that depended on its old
230d793d
RS
2238 contents, we will get confused. If this insn is used, thing
2239 will be set correctly in combine_instructions. */
d0ab8cd3
RK
2240
2241 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2242 if ((set = single_set (XEXP (link, 0))) != 0
2243 && rtx_equal_p (i2dest, SET_DEST (set)))
2244 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2245
2246 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
2247
2248 /* If the reg formerly set in I2 died only once and that was in I3,
2249 zero its use count so it won't make `reload' do any work. */
2250 if (! added_sets_2 && newi2pat == 0)
2251 {
2252 regno = REGNO (i2dest);
2253 reg_n_sets[regno]--;
2254 if (reg_n_sets[regno] == 0
5f4f0e22
CH
2255 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2256 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
230d793d
RS
2257 reg_n_refs[regno] = 0;
2258 }
2259 }
2260
2261 if (i1 && GET_CODE (i1dest) == REG)
2262 {
d0ab8cd3
RK
2263 rtx link;
2264 rtx i1_insn = 0, i1_val = 0, set;
2265
2266 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2267 if ((set = single_set (XEXP (link, 0))) != 0
2268 && rtx_equal_p (i1dest, SET_DEST (set)))
2269 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2270
2271 record_value_for_reg (i1dest, i1_insn, i1_val);
2272
230d793d
RS
2273 regno = REGNO (i1dest);
2274 if (! added_sets_1)
2275 {
2276 reg_n_sets[regno]--;
2277 if (reg_n_sets[regno] == 0
5f4f0e22
CH
2278 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2279 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
230d793d
RS
2280 reg_n_refs[regno] = 0;
2281 }
2282 }
2283
951553af 2284 /* Update reg_nonzero_bits et al for any changes that may have been made
22609cbf
RK
2285 to this insn. */
2286
951553af 2287 note_stores (newpat, set_nonzero_bits_and_sign_copies);
22609cbf 2288 if (newi2pat)
951553af 2289 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
22609cbf 2290
230d793d
RS
2291 /* If I3 is now an unconditional jump, ensure that it has a
2292 BARRIER following it since it may have initially been a
381ee8af 2293 conditional jump. It may also be the last nonnote insn. */
230d793d
RS
2294
2295 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
381ee8af
TW
2296 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2297 || GET_CODE (temp) != BARRIER))
230d793d
RS
2298 emit_barrier_after (i3);
2299 }
2300
2301 combine_successes++;
2302
abe6e52f
RK
2303 if (added_links_insn
2304 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2305 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2306 return added_links_insn;
2307 else
2308 return newi2pat ? i2 : i3;
230d793d
RS
2309}
2310\f
2311/* Undo all the modifications recorded in undobuf. */
2312
2313static void
2314undo_all ()
2315{
2316 register int i;
2317 if (undobuf.num_undo > MAX_UNDO)
2318 undobuf.num_undo = MAX_UNDO;
2319 for (i = undobuf.num_undo - 1; i >= 0; i--)
7c046e4e
RK
2320 {
2321 if (undobuf.undo[i].is_int)
2322 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2323 else
f5393ab9 2324 *undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r;
7c046e4e
RK
2325
2326 }
230d793d
RS
2327
2328 obfree (undobuf.storage);
2329 undobuf.num_undo = 0;
2330}
2331\f
2332/* Find the innermost point within the rtx at LOC, possibly LOC itself,
d0ab8cd3
RK
2333 where we have an arithmetic expression and return that point. LOC will
2334 be inside INSN.
230d793d
RS
2335
2336 try_combine will call this function to see if an insn can be split into
2337 two insns. */
2338
2339static rtx *
d0ab8cd3 2340find_split_point (loc, insn)
230d793d 2341 rtx *loc;
d0ab8cd3 2342 rtx insn;
230d793d
RS
2343{
2344 rtx x = *loc;
2345 enum rtx_code code = GET_CODE (x);
2346 rtx *split;
2347 int len = 0, pos, unsignedp;
2348 rtx inner;
2349
2350 /* First special-case some codes. */
2351 switch (code)
2352 {
2353 case SUBREG:
2354#ifdef INSN_SCHEDULING
2355 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2356 point. */
2357 if (GET_CODE (SUBREG_REG (x)) == MEM)
2358 return loc;
2359#endif
d0ab8cd3 2360 return find_split_point (&SUBREG_REG (x), insn);
230d793d 2361
230d793d 2362 case MEM:
916f14f1 2363#ifdef HAVE_lo_sum
230d793d
RS
2364 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2365 using LO_SUM and HIGH. */
2366 if (GET_CODE (XEXP (x, 0)) == CONST
2367 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2368 {
2369 SUBST (XEXP (x, 0),
2370 gen_rtx_combine (LO_SUM, Pmode,
2371 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2372 XEXP (x, 0)));
2373 return &XEXP (XEXP (x, 0), 0);
2374 }
230d793d
RS
2375#endif
2376
916f14f1
RK
2377 /* If we have a PLUS whose second operand is a constant and the
2378 address is not valid, perhaps will can split it up using
2379 the machine-specific way to split large constants. We use
d0ab8cd3 2380 the first psuedo-reg (one of the virtual regs) as a placeholder;
916f14f1
RK
2381 it will not remain in the result. */
2382 if (GET_CODE (XEXP (x, 0)) == PLUS
2383 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2384 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2385 {
2386 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2387 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2388 subst_insn);
2389
2390 /* This should have produced two insns, each of which sets our
2391 placeholder. If the source of the second is a valid address,
2392 we can make put both sources together and make a split point
2393 in the middle. */
2394
2395 if (seq && XVECLEN (seq, 0) == 2
2396 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2397 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2398 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2399 && ! reg_mentioned_p (reg,
2400 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2401 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2402 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2403 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2404 && memory_address_p (GET_MODE (x),
2405 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2406 {
2407 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2408 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2409
2410 /* Replace the placeholder in SRC2 with SRC1. If we can
2411 find where in SRC2 it was placed, that can become our
2412 split point and we can replace this address with SRC2.
2413 Just try two obvious places. */
2414
2415 src2 = replace_rtx (src2, reg, src1);
2416 split = 0;
2417 if (XEXP (src2, 0) == src1)
2418 split = &XEXP (src2, 0);
2419 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2420 && XEXP (XEXP (src2, 0), 0) == src1)
2421 split = &XEXP (XEXP (src2, 0), 0);
2422
2423 if (split)
2424 {
2425 SUBST (XEXP (x, 0), src2);
2426 return split;
2427 }
2428 }
1a26b032
RK
2429
2430 /* If that didn't work, perhaps the first operand is complex and
2431 needs to be computed separately, so make a split point there.
2432 This will occur on machines that just support REG + CONST
2433 and have a constant moved through some previous computation. */
2434
2435 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2436 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2437 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2438 == 'o')))
2439 return &XEXP (XEXP (x, 0), 0);
916f14f1
RK
2440 }
2441 break;
2442
230d793d
RS
2443 case SET:
2444#ifdef HAVE_cc0
2445 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2446 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2447 we need to put the operand into a register. So split at that
2448 point. */
2449
2450 if (SET_DEST (x) == cc0_rtx
2451 && GET_CODE (SET_SRC (x)) != COMPARE
2452 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2453 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2454 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2455 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2456 return &SET_SRC (x);
2457#endif
2458
2459 /* See if we can split SET_SRC as it stands. */
d0ab8cd3 2460 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2461 if (split && split != &SET_SRC (x))
2462 return split;
2463
2464 /* See if this is a bitfield assignment with everything constant. If
2465 so, this is an IOR of an AND, so split it into that. */
2466 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2467 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
5f4f0e22 2468 <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
2469 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2470 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2471 && GET_CODE (SET_SRC (x)) == CONST_INT
2472 && ((INTVAL (XEXP (SET_DEST (x), 1))
2473 + INTVAL (XEXP (SET_DEST (x), 2)))
2474 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2475 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2476 {
2477 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2478 int len = INTVAL (XEXP (SET_DEST (x), 1));
2479 int src = INTVAL (SET_SRC (x));
2480 rtx dest = XEXP (SET_DEST (x), 0);
2481 enum machine_mode mode = GET_MODE (dest);
5f4f0e22 2482 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
230d793d
RS
2483
2484#if BITS_BIG_ENDIAN
2485 pos = GET_MODE_BITSIZE (mode) - len - pos;
2486#endif
2487
2488 if (src == mask)
2489 SUBST (SET_SRC (x),
5f4f0e22 2490 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
230d793d
RS
2491 else
2492 SUBST (SET_SRC (x),
2493 gen_binary (IOR, mode,
2494 gen_binary (AND, mode, dest,
5f4f0e22
CH
2495 GEN_INT (~ (mask << pos)
2496 & GET_MODE_MASK (mode))),
2497 GEN_INT (src << pos)));
230d793d
RS
2498
2499 SUBST (SET_DEST (x), dest);
2500
d0ab8cd3 2501 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2502 if (split && split != &SET_SRC (x))
2503 return split;
2504 }
2505
2506 /* Otherwise, see if this is an operation that we can split into two.
2507 If so, try to split that. */
2508 code = GET_CODE (SET_SRC (x));
2509
2510 switch (code)
2511 {
d0ab8cd3
RK
2512 case AND:
2513 /* If we are AND'ing with a large constant that is only a single
2514 bit and the result is only being used in a context where we
2515 need to know if it is zero or non-zero, replace it with a bit
2516 extraction. This will avoid the large constant, which might
2517 have taken more than one insn to make. If the constant were
2518 not a valid argument to the AND but took only one insn to make,
2519 this is no worse, but if it took more than one insn, it will
2520 be better. */
2521
2522 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2523 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2524 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2525 && GET_CODE (SET_DEST (x)) == REG
2526 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2527 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2528 && XEXP (*split, 0) == SET_DEST (x)
2529 && XEXP (*split, 1) == const0_rtx)
2530 {
2531 SUBST (SET_SRC (x),
2532 make_extraction (GET_MODE (SET_DEST (x)),
2533 XEXP (SET_SRC (x), 0),
2534 pos, NULL_RTX, 1, 1, 0, 0));
2535 return find_split_point (loc, insn);
2536 }
2537 break;
2538
230d793d
RS
2539 case SIGN_EXTEND:
2540 inner = XEXP (SET_SRC (x), 0);
2541 pos = 0;
2542 len = GET_MODE_BITSIZE (GET_MODE (inner));
2543 unsignedp = 0;
2544 break;
2545
2546 case SIGN_EXTRACT:
2547 case ZERO_EXTRACT:
2548 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2549 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2550 {
2551 inner = XEXP (SET_SRC (x), 0);
2552 len = INTVAL (XEXP (SET_SRC (x), 1));
2553 pos = INTVAL (XEXP (SET_SRC (x), 2));
2554
2555#if BITS_BIG_ENDIAN
2556 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2557#endif
2558 unsignedp = (code == ZERO_EXTRACT);
2559 }
2560 break;
2561 }
2562
2563 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2564 {
2565 enum machine_mode mode = GET_MODE (SET_SRC (x));
2566
d0ab8cd3
RK
2567 /* For unsigned, we have a choice of a shift followed by an
2568 AND or two shifts. Use two shifts for field sizes where the
2569 constant might be too large. We assume here that we can
2570 always at least get 8-bit constants in an AND insn, which is
2571 true for every current RISC. */
2572
2573 if (unsignedp && len <= 8)
230d793d
RS
2574 {
2575 SUBST (SET_SRC (x),
2576 gen_rtx_combine
2577 (AND, mode,
2578 gen_rtx_combine (LSHIFTRT, mode,
2579 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
2580 GEN_INT (pos)),
2581 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
230d793d 2582
d0ab8cd3 2583 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2584 if (split && split != &SET_SRC (x))
2585 return split;
2586 }
2587 else
2588 {
2589 SUBST (SET_SRC (x),
2590 gen_rtx_combine
d0ab8cd3 2591 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
230d793d
RS
2592 gen_rtx_combine (ASHIFT, mode,
2593 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
2594 GEN_INT (GET_MODE_BITSIZE (mode)
2595 - len - pos)),
2596 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
230d793d 2597
d0ab8cd3 2598 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2599 if (split && split != &SET_SRC (x))
2600 return split;
2601 }
2602 }
2603
2604 /* See if this is a simple operation with a constant as the second
2605 operand. It might be that this constant is out of range and hence
2606 could be used as a split point. */
2607 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2608 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2609 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2610 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2611 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2612 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2613 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2614 == 'o'))))
2615 return &XEXP (SET_SRC (x), 1);
2616
2617 /* Finally, see if this is a simple operation with its first operand
2618 not in a register. The operation might require this operand in a
2619 register, so return it as a split point. We can always do this
2620 because if the first operand were another operation, we would have
2621 already found it as a split point. */
2622 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2623 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2624 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2625 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2626 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2627 return &XEXP (SET_SRC (x), 0);
2628
2629 return 0;
2630
2631 case AND:
2632 case IOR:
2633 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2634 it is better to write this as (not (ior A B)) so we can split it.
2635 Similarly for IOR. */
2636 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2637 {
2638 SUBST (*loc,
2639 gen_rtx_combine (NOT, GET_MODE (x),
2640 gen_rtx_combine (code == IOR ? AND : IOR,
2641 GET_MODE (x),
2642 XEXP (XEXP (x, 0), 0),
2643 XEXP (XEXP (x, 1), 0))));
d0ab8cd3 2644 return find_split_point (loc, insn);
230d793d
RS
2645 }
2646
2647 /* Many RISC machines have a large set of logical insns. If the
2648 second operand is a NOT, put it first so we will try to split the
2649 other operand first. */
2650 if (GET_CODE (XEXP (x, 1)) == NOT)
2651 {
2652 rtx tem = XEXP (x, 0);
2653 SUBST (XEXP (x, 0), XEXP (x, 1));
2654 SUBST (XEXP (x, 1), tem);
2655 }
2656 break;
2657 }
2658
2659 /* Otherwise, select our actions depending on our rtx class. */
2660 switch (GET_RTX_CLASS (code))
2661 {
2662 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2663 case '3':
d0ab8cd3 2664 split = find_split_point (&XEXP (x, 2), insn);
230d793d
RS
2665 if (split)
2666 return split;
2667 /* ... fall through ... */
2668 case '2':
2669 case 'c':
2670 case '<':
d0ab8cd3 2671 split = find_split_point (&XEXP (x, 1), insn);
230d793d
RS
2672 if (split)
2673 return split;
2674 /* ... fall through ... */
2675 case '1':
2676 /* Some machines have (and (shift ...) ...) insns. If X is not
2677 an AND, but XEXP (X, 0) is, use it as our split point. */
2678 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2679 return &XEXP (x, 0);
2680
d0ab8cd3 2681 split = find_split_point (&XEXP (x, 0), insn);
230d793d
RS
2682 if (split)
2683 return split;
2684 return loc;
2685 }
2686
2687 /* Otherwise, we don't have a split point. */
2688 return 0;
2689}
2690\f
2691/* Throughout X, replace FROM with TO, and return the result.
2692 The result is TO if X is FROM;
2693 otherwise the result is X, but its contents may have been modified.
2694 If they were modified, a record was made in undobuf so that
2695 undo_all will (among other things) return X to its original state.
2696
2697 If the number of changes necessary is too much to record to undo,
2698 the excess changes are not made, so the result is invalid.
2699 The changes already made can still be undone.
2700 undobuf.num_undo is incremented for such changes, so by testing that
2701 the caller can tell whether the result is valid.
2702
2703 `n_occurrences' is incremented each time FROM is replaced.
2704
2705 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2706
5089e22e 2707 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
230d793d
RS
2708 by copying if `n_occurrences' is non-zero. */
2709
2710static rtx
2711subst (x, from, to, in_dest, unique_copy)
2712 register rtx x, from, to;
2713 int in_dest;
2714 int unique_copy;
2715{
2716 register char *fmt;
2717 register int len, i;
2718 register enum rtx_code code = GET_CODE (x), orig_code = code;
2719 rtx temp;
2720 enum machine_mode mode = GET_MODE (x);
2721 enum machine_mode op0_mode = VOIDmode;
2722 rtx other_insn;
2723 rtx *cc_use;
2724 int n_restarts = 0;
2725
2726/* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2727 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2728 If it is 0, that cannot be done. We can now do this for any MEM
2729 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2730 If not for that, MEM's would very rarely be safe. */
2731
2732/* Reject MODEs bigger than a word, because we might not be able
2733 to reference a two-register group starting with an arbitrary register
2734 (and currently gen_lowpart might crash for a SUBREG). */
2735
2736#define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2737 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2738
2739/* Two expressions are equal if they are identical copies of a shared
2740 RTX or if they are both registers with the same register number
2741 and mode. */
2742
2743#define COMBINE_RTX_EQUAL_P(X,Y) \
2744 ((X) == (Y) \
2745 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2746 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2747
2748 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2749 {
2750 n_occurrences++;
2751 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2752 }
2753
2754 /* If X and FROM are the same register but different modes, they will
2755 not have been seen as equal above. However, flow.c will make a
2756 LOG_LINKS entry for that case. If we do nothing, we will try to
2757 rerecognize our original insn and, when it succeeds, we will
2758 delete the feeding insn, which is incorrect.
2759
2760 So force this insn not to match in this (rare) case. */
2761 if (! in_dest && code == REG && GET_CODE (from) == REG
2762 && REGNO (x) == REGNO (from))
2763 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2764
2765 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2766 of which may contain things that can be combined. */
2767 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2768 return x;
2769
2770 /* It is possible to have a subexpression appear twice in the insn.
2771 Suppose that FROM is a register that appears within TO.
2772 Then, after that subexpression has been scanned once by `subst',
2773 the second time it is scanned, TO may be found. If we were
2774 to scan TO here, we would find FROM within it and create a
2775 self-referent rtl structure which is completely wrong. */
2776 if (COMBINE_RTX_EQUAL_P (x, to))
2777 return to;
2778
2779 len = GET_RTX_LENGTH (code);
2780 fmt = GET_RTX_FORMAT (code);
2781
2782 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2783 set up to skip this common case. All other cases where we want to
2784 suppress replacing something inside a SET_SRC are handled via the
2785 IN_DEST operand. */
2786 if (code == SET
2787 && (GET_CODE (SET_DEST (x)) == REG
2788 || GET_CODE (SET_DEST (x)) == CC0
2789 || GET_CODE (SET_DEST (x)) == PC))
2790 fmt = "ie";
2791
2792 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2793 if (fmt[0] == 'e')
2794 op0_mode = GET_MODE (XEXP (x, 0));
2795
2796 for (i = 0; i < len; i++)
2797 {
2798 if (fmt[i] == 'E')
2799 {
2800 register int j;
2801 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2802 {
2803 register rtx new;
2804 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2805 {
2806 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2807 n_occurrences++;
2808 }
2809 else
2810 {
2811 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2812
2813 /* If this substitution failed, this whole thing fails. */
2814 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2815 return new;
2816 }
2817
2818 SUBST (XVECEXP (x, i, j), new);
2819 }
2820 }
2821 else if (fmt[i] == 'e')
2822 {
2823 register rtx new;
2824
2825 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2826 {
42301240
RK
2827 /* In general, don't install a subreg involving two modes not
2828 tieable. It can worsen register allocation, and can even
2829 make invalid reload insns, since the reg inside may need to
2830 be copied from in the outside mode, and that may be invalid
2831 if it is an fp reg copied in integer mode.
2832
2833 We allow two exceptions to this: It is valid if it is inside
2834 another SUBREG and the mode of that SUBREG and the mode of
2835 the inside of TO is tieable and it is valid if X is a SET
2836 that copies FROM to CC0. */
2837 if (GET_CODE (to) == SUBREG
2838 && ! MODES_TIEABLE_P (GET_MODE (to),
2839 GET_MODE (SUBREG_REG (to)))
2840 && ! (code == SUBREG
2841 && MODES_TIEABLE_P (mode, GET_MODE (SUBREG_REG (to))))
2842#ifdef HAVE_cc0
2843 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
2844#endif
2845 )
2846 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
2847
230d793d
RS
2848 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2849 n_occurrences++;
2850 }
2851 else
2852 /* If we are in a SET_DEST, suppress most cases unless we
2853 have gone inside a MEM, in which case we want to
2854 simplify the address. We assume here that things that
2855 are actually part of the destination have their inner
2856 parts in the first expression. This is true for SUBREG,
2857 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2858 things aside from REG and MEM that should appear in a
2859 SET_DEST. */
2860 new = subst (XEXP (x, i), from, to,
2861 (((in_dest
2862 && (code == SUBREG || code == STRICT_LOW_PART
2863 || code == ZERO_EXTRACT))
2864 || code == SET)
2865 && i == 0), unique_copy);
2866
2867 /* If we found that we will have to reject this combination,
2868 indicate that by returning the CLOBBER ourselves, rather than
2869 an expression containing it. This will speed things up as
2870 well as prevent accidents where two CLOBBERs are considered
2871 to be equal, thus producing an incorrect simplification. */
2872
2873 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2874 return new;
2875
2876 SUBST (XEXP (x, i), new);
2877 }
2878 }
2879
d0ab8cd3
RK
2880 /* We come back to here if we have replaced the expression with one of
2881 a different code and it is likely that further simplification will be
2882 possible. */
2883
2884 restart:
2885
eeb43d32
RK
2886 /* If we have restarted more than 4 times, we are probably looping, so
2887 give up. */
2888 if (++n_restarts > 4)
2889 return x;
2890
2891 /* If we are restarting at all, it means that we no longer know the
2892 original mode of operand 0 (since we have probably changed the
2893 form of X). */
2894
2895 if (n_restarts > 1)
2896 op0_mode = VOIDmode;
2897
d0ab8cd3
RK
2898 code = GET_CODE (x);
2899
230d793d
RS
2900 /* If this is a commutative operation, put a constant last and a complex
2901 expression first. We don't need to do this for comparisons here. */
2902 if (GET_RTX_CLASS (code) == 'c'
2903 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2904 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2905 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2906 || (GET_CODE (XEXP (x, 0)) == SUBREG
2907 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2908 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2909 {
2910 temp = XEXP (x, 0);
2911 SUBST (XEXP (x, 0), XEXP (x, 1));
2912 SUBST (XEXP (x, 1), temp);
2913 }
2914
22609cbf
RK
2915 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2916 sign extension of a PLUS with a constant, reverse the order of the sign
2917 extension and the addition. Note that this not the same as the original
2918 code, but overflow is undefined for signed values. Also note that the
2919 PLUS will have been partially moved "inside" the sign-extension, so that
2920 the first operand of X will really look like:
2921 (ashiftrt (plus (ashift A C4) C5) C4).
2922 We convert this to
2923 (plus (ashiftrt (ashift A C4) C2) C4)
2924 and replace the first operand of X with that expression. Later parts
2925 of this function may simplify the expression further.
2926
2927 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2928 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2929 distributive law to produce (plus (mult (sign_extend X) C1) C3).
2930
2931 We do this to simplify address expressions. */
2932
2933 if ((code == PLUS || code == MINUS || code == MULT)
2934 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2935 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2936 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2937 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2938 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2939 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2940 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2941 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2942 XEXP (XEXP (XEXP (x, 0), 0), 1),
2943 XEXP (XEXP (x, 0), 1))) != 0)
2944 {
2945 rtx new
2946 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2947 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2948 INTVAL (XEXP (XEXP (x, 0), 1)));
2949
2950 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2951 INTVAL (XEXP (XEXP (x, 0), 1)));
2952
2953 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2954 }
2955
d0ab8cd3
RK
2956 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2957 applying it to the arms of the IF_THEN_ELSE. This often simplifies
abe6e52f
RK
2958 things. Check for cases where both arms are testing the same
2959 condition.
2960
2961 Don't do anything if all operands are very simple. */
2962
2963 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
2964 || GET_RTX_CLASS (code) == '<')
2965 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
2966 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
2967 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
2968 == 'o')))
2969 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
2970 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
2971 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
2972 == 'o')))))
2973 || (GET_RTX_CLASS (code) == '1'
2974 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
2975 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
2976 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
2977 == 'o'))))))
d0ab8cd3 2978 {
abe6e52f
RK
2979 rtx cond, true, false;
2980
2981 cond = if_then_else_cond (x, &true, &false);
2982 if (cond != 0)
2983 {
2984 rtx cop1 = const0_rtx;
2985 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
2986
2987 /* If the result values are STORE_FLAG_VALUE and zero, we can
2988 just make the comparison operation. */
2989 if (true == const_true_rtx && false == const0_rtx)
2990 x = gen_binary (cond_code, mode, cond, cop1);
2991 else if (true == const0_rtx && false == const_true_rtx)
2992 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
2993
2994 /* Likewise, we can make the negate of a comparison operation
2995 if the result values are - STORE_FLAG_VALUE and zero. */
2996 else if (GET_CODE (true) == CONST_INT
2997 && INTVAL (true) == - STORE_FLAG_VALUE
2998 && false == const0_rtx)
2999 x = gen_unary (NEG, mode,
3000 gen_binary (cond_code, mode, cond, cop1));
3001 else if (GET_CODE (false) == CONST_INT
3002 && INTVAL (false) == - STORE_FLAG_VALUE
3003 && true == const0_rtx)
3004 x = gen_unary (NEG, mode,
3005 gen_binary (reverse_condition (cond_code),
3006 mode, cond, cop1));
3007 else
3008 x = gen_rtx (IF_THEN_ELSE, mode,
3009 gen_binary (cond_code, VOIDmode, cond, cop1),
3010 subst (true, pc_rtx, pc_rtx, 0, 0),
3011 subst (false, pc_rtx, pc_rtx, 0, 0));
5109d49f 3012
abe6e52f
RK
3013 goto restart;
3014 }
d0ab8cd3
RK
3015 }
3016
230d793d
RS
3017 /* Try to fold this expression in case we have constants that weren't
3018 present before. */
3019 temp = 0;
3020 switch (GET_RTX_CLASS (code))
3021 {
3022 case '1':
3023 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3024 break;
3025 case '<':
3026 temp = simplify_relational_operation (code, op0_mode,
3027 XEXP (x, 0), XEXP (x, 1));
77fa0940
RK
3028#ifdef FLOAT_STORE_FLAG_VALUE
3029 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3030 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3031 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3032#endif
230d793d
RS
3033 break;
3034 case 'c':
3035 case '2':
3036 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3037 break;
3038 case 'b':
3039 case '3':
3040 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3041 XEXP (x, 1), XEXP (x, 2));
3042 break;
3043 }
3044
3045 if (temp)
d0ab8cd3 3046 x = temp, code = GET_CODE (temp);
230d793d 3047
230d793d 3048 /* First see if we can apply the inverse distributive law. */
224eeff2
RK
3049 if (code == PLUS || code == MINUS
3050 || code == AND || code == IOR || code == XOR)
230d793d
RS
3051 {
3052 x = apply_distributive_law (x);
3053 code = GET_CODE (x);
3054 }
3055
3056 /* If CODE is an associative operation not otherwise handled, see if we
3057 can associate some operands. This can win if they are constants or
3058 if they are logically related (i.e. (a & b) & a. */
3059 if ((code == PLUS || code == MINUS
3060 || code == MULT || code == AND || code == IOR || code == XOR
3061 || code == DIV || code == UDIV
3062 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3ad2180a 3063 && INTEGRAL_MODE_P (mode))
230d793d
RS
3064 {
3065 if (GET_CODE (XEXP (x, 0)) == code)
3066 {
3067 rtx other = XEXP (XEXP (x, 0), 0);
3068 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3069 rtx inner_op1 = XEXP (x, 1);
3070 rtx inner;
3071
3072 /* Make sure we pass the constant operand if any as the second
3073 one if this is a commutative operation. */
3074 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3075 {
3076 rtx tem = inner_op0;
3077 inner_op0 = inner_op1;
3078 inner_op1 = tem;
3079 }
3080 inner = simplify_binary_operation (code == MINUS ? PLUS
3081 : code == DIV ? MULT
3082 : code == UDIV ? MULT
3083 : code,
3084 mode, inner_op0, inner_op1);
3085
3086 /* For commutative operations, try the other pair if that one
3087 didn't simplify. */
3088 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3089 {
3090 other = XEXP (XEXP (x, 0), 1);
3091 inner = simplify_binary_operation (code, mode,
3092 XEXP (XEXP (x, 0), 0),
3093 XEXP (x, 1));
3094 }
3095
3096 if (inner)
3097 {
3098 x = gen_binary (code, mode, other, inner);
3099 goto restart;
3100
3101 }
3102 }
3103 }
3104
3105 /* A little bit of algebraic simplification here. */
3106 switch (code)
3107 {
3108 case MEM:
3109 /* Ensure that our address has any ASHIFTs converted to MULT in case
3110 address-recognizing predicates are called later. */
3111 temp = make_compound_operation (XEXP (x, 0), MEM);
3112 SUBST (XEXP (x, 0), temp);
3113 break;
3114
3115 case SUBREG:
3116 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3117 is paradoxical. If we can't do that safely, then it becomes
3118 something nonsensical so that this combination won't take place. */
3119
3120 if (GET_CODE (SUBREG_REG (x)) == MEM
3121 && (GET_MODE_SIZE (mode)
3122 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3123 {
3124 rtx inner = SUBREG_REG (x);
3125 int endian_offset = 0;
3126 /* Don't change the mode of the MEM
3127 if that would change the meaning of the address. */
3128 if (MEM_VOLATILE_P (SUBREG_REG (x))
3129 || mode_dependent_address_p (XEXP (inner, 0)))
3130 return gen_rtx (CLOBBER, mode, const0_rtx);
3131
3132#if BYTES_BIG_ENDIAN
3133 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3134 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3135 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3136 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
3137#endif
3138 /* Note if the plus_constant doesn't make a valid address
3139 then this combination won't be accepted. */
3140 x = gen_rtx (MEM, mode,
3141 plus_constant (XEXP (inner, 0),
3142 (SUBREG_WORD (x) * UNITS_PER_WORD
3143 + endian_offset)));
3144 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3145 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3146 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3147 return x;
3148 }
3149
3150 /* If we are in a SET_DEST, these other cases can't apply. */
3151 if (in_dest)
3152 return x;
3153
3154 /* Changing mode twice with SUBREG => just change it once,
3155 or not at all if changing back to starting mode. */
3156 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3157 {
3158 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3159 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3160 return SUBREG_REG (SUBREG_REG (x));
3161
3162 SUBST_INT (SUBREG_WORD (x),
3163 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3164 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3165 }
3166
3167 /* SUBREG of a hard register => just change the register number
3168 and/or mode. If the hard register is not valid in that mode,
26ecfc76
RK
3169 suppress this combination. If the hard register is the stack,
3170 frame, or argument pointer, leave this as a SUBREG. */
230d793d
RS
3171
3172 if (GET_CODE (SUBREG_REG (x)) == REG
26ecfc76
RK
3173 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3174 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
6d7096b0
DE
3175#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3176 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3177#endif
26ecfc76
RK
3178#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3179 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3180#endif
3181 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
230d793d
RS
3182 {
3183 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3184 mode))
3185 return gen_rtx (REG, mode,
3186 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3187 else
3188 return gen_rtx (CLOBBER, mode, const0_rtx);
3189 }
3190
3191 /* For a constant, try to pick up the part we want. Handle a full
a4bde0b1
RK
3192 word and low-order part. Only do this if we are narrowing
3193 the constant; if it is being widened, we have no idea what
3194 the extra bits will have been set to. */
230d793d
RS
3195
3196 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3197 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
a4bde0b1 3198 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
230d793d
RS
3199 && GET_MODE_CLASS (mode) == MODE_INT)
3200 {
3201 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 3202 0, op0_mode);
230d793d
RS
3203 if (temp)
3204 return temp;
3205 }
3206
19808e22
RS
3207 /* If we want a subreg of a constant, at offset 0,
3208 take the low bits. On a little-endian machine, that's
3209 always valid. On a big-endian machine, it's valid
3210 only if the constant's mode fits in one word. */
a4bde0b1 3211 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
19808e22
RS
3212 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode)
3213#if WORDS_BIG_ENDIAN
097e45d1 3214 && GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD
19808e22
RS
3215#endif
3216 )
230d793d
RS
3217 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3218
ff12fdac
RK
3219 /* If we are narrowing an integral object, we need to see if we can
3220 simplify the expression for the object knowing that we only need the
d0ab8cd3
RK
3221 low-order bits. */
3222
ff12fdac
RK
3223 if (GET_MODE_CLASS (mode) == MODE_INT
3224 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
3225 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
d0ab8cd3 3226 && subreg_lowpart_p (x))
6139ff20 3227 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_MASK (mode),
e3d616e3 3228 NULL_RTX, 0);
230d793d
RS
3229 break;
3230
3231 case NOT:
3232 /* (not (plus X -1)) can become (neg X). */
3233 if (GET_CODE (XEXP (x, 0)) == PLUS
3234 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3235 {
3236 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3237 goto restart;
3238 }
3239
3240 /* Similarly, (not (neg X)) is (plus X -1). */
3241 if (GET_CODE (XEXP (x, 0)) == NEG)
3242 {
3243 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3244 goto restart;
3245 }
3246
d0ab8cd3
RK
3247 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3248 if (GET_CODE (XEXP (x, 0)) == XOR
3249 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3250 && (temp = simplify_unary_operation (NOT, mode,
3251 XEXP (XEXP (x, 0), 1),
3252 mode)) != 0)
3253 {
3254 SUBST (XEXP (XEXP (x, 0), 1), temp);
3255 return XEXP (x, 0);
3256 }
3257
230d793d
RS
3258 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3259 other than 1, but that is not valid. We could do a similar
3260 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3261 but this doesn't seem common enough to bother with. */
3262 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3263 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3264 {
3265 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
3266 XEXP (XEXP (x, 0), 1));
3267 goto restart;
3268 }
3269
3270 if (GET_CODE (XEXP (x, 0)) == SUBREG
3271 && subreg_lowpart_p (XEXP (x, 0))
3272 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3273 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3274 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3275 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3276 {
3277 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3278
3279 x = gen_rtx (ROTATE, inner_mode,
3280 gen_unary (NOT, inner_mode, const1_rtx),
3281 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3282 x = gen_lowpart_for_combine (mode, x);
3283 goto restart;
3284 }
3285
3286#if STORE_FLAG_VALUE == -1
3287 /* (not (comparison foo bar)) can be done by reversing the comparison
3288 code if valid. */
3289 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3290 && reversible_comparison_p (XEXP (x, 0)))
3291 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3292 mode, XEXP (XEXP (x, 0), 0),
3293 XEXP (XEXP (x, 0), 1));
500c518b
RK
3294
3295 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3296 is (lt foo (const_int 0)), so we can perform the above
3297 simplification. */
3298
3299 if (XEXP (x, 1) == const1_rtx
3300 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3301 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3302 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3303 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
3304#endif
3305
3306 /* Apply De Morgan's laws to reduce number of patterns for machines
3307 with negating logical insns (and-not, nand, etc.). If result has
3308 only one NOT, put it first, since that is how the patterns are
3309 coded. */
3310
3311 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3312 {
3313 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3314
3315 if (GET_CODE (in1) == NOT)
3316 in1 = XEXP (in1, 0);
3317 else
3318 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3319
3320 if (GET_CODE (in2) == NOT)
3321 in2 = XEXP (in2, 0);
3322 else if (GET_CODE (in2) == CONST_INT
5f4f0e22
CH
3323 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3324 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
230d793d
RS
3325 else
3326 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3327
3328 if (GET_CODE (in2) == NOT)
3329 {
3330 rtx tem = in2;
3331 in2 = in1; in1 = tem;
3332 }
3333
3334 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3335 mode, in1, in2);
3336 goto restart;
3337 }
3338 break;
3339
3340 case NEG:
3341 /* (neg (plus X 1)) can become (not X). */
3342 if (GET_CODE (XEXP (x, 0)) == PLUS
3343 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3344 {
3345 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3346 goto restart;
3347 }
3348
3349 /* Similarly, (neg (not X)) is (plus X 1). */
3350 if (GET_CODE (XEXP (x, 0)) == NOT)
3351 {
5109d49f 3352 x = plus_constant (XEXP (XEXP (x, 0), 0), 1);
230d793d
RS
3353 goto restart;
3354 }
3355
230d793d
RS
3356 /* (neg (minus X Y)) can become (minus Y X). */
3357 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3358 && (! FLOAT_MODE_P (mode)
230d793d 3359 /* x-y != -(y-x) with IEEE floating point. */
7e2a0d8e
RK
3360 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3361 || flag_fast_math))
230d793d
RS
3362 {
3363 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3364 XEXP (XEXP (x, 0), 0));
3365 goto restart;
3366 }
3367
d0ab8cd3
RK
3368 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3369 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3370 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
d0ab8cd3
RK
3371 {
3372 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3373 goto restart;
3374 }
3375
230d793d
RS
3376 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3377 if we can then eliminate the NEG (e.g.,
3378 if the operand is a constant). */
3379
3380 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3381 {
3382 temp = simplify_unary_operation (NEG, mode,
3383 XEXP (XEXP (x, 0), 0), mode);
3384 if (temp)
3385 {
3386 SUBST (XEXP (XEXP (x, 0), 0), temp);
3387 return XEXP (x, 0);
3388 }
3389 }
3390
3391 temp = expand_compound_operation (XEXP (x, 0));
3392
3393 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3394 replaced by (lshiftrt X C). This will convert
3395 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3396
3397 if (GET_CODE (temp) == ASHIFTRT
3398 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3399 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3400 {
3401 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3402 INTVAL (XEXP (temp, 1)));
3403 goto restart;
3404 }
3405
951553af 3406 /* If X has only a single bit that might be nonzero, say, bit I, convert
230d793d
RS
3407 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3408 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3409 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3410 or a SUBREG of one since we'd be making the expression more
3411 complex if it was just a register. */
3412
3413 if (GET_CODE (temp) != REG
3414 && ! (GET_CODE (temp) == SUBREG
3415 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3416 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
230d793d
RS
3417 {
3418 rtx temp1 = simplify_shift_const
5f4f0e22
CH
3419 (NULL_RTX, ASHIFTRT, mode,
3420 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
230d793d
RS
3421 GET_MODE_BITSIZE (mode) - 1 - i),
3422 GET_MODE_BITSIZE (mode) - 1 - i);
3423
3424 /* If all we did was surround TEMP with the two shifts, we
3425 haven't improved anything, so don't use it. Otherwise,
3426 we are better off with TEMP1. */
3427 if (GET_CODE (temp1) != ASHIFTRT
3428 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3429 || XEXP (XEXP (temp1, 0), 0) != temp)
3430 {
3431 x = temp1;
3432 goto restart;
3433 }
3434 }
3435 break;
3436
3437 case FLOAT_TRUNCATE:
3438 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3439 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3440 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3441 return XEXP (XEXP (x, 0), 0);
3442 break;
3443
3444#ifdef HAVE_cc0
3445 case COMPARE:
3446 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3447 using cc0, in which case we want to leave it as a COMPARE
3448 so we can distinguish it from a register-register-copy. */
3449 if (XEXP (x, 1) == const0_rtx)
3450 return XEXP (x, 0);
3451
3452 /* In IEEE floating point, x-0 is not the same as x. */
3453 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e
RK
3454 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3455 || flag_fast_math)
230d793d
RS
3456 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3457 return XEXP (x, 0);
3458 break;
3459#endif
3460
3461 case CONST:
3462 /* (const (const X)) can become (const X). Do it this way rather than
3463 returning the inner CONST since CONST can be shared with a
3464 REG_EQUAL note. */
3465 if (GET_CODE (XEXP (x, 0)) == CONST)
3466 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3467 break;
3468
3469#ifdef HAVE_lo_sum
3470 case LO_SUM:
3471 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3472 can add in an offset. find_split_point will split this address up
3473 again if it doesn't match. */
3474 if (GET_CODE (XEXP (x, 0)) == HIGH
3475 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3476 return XEXP (x, 1);
3477 break;
3478#endif
3479
3480 case PLUS:
3481 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3482 outermost. That's because that's the way indexed addresses are
3483 supposed to appear. This code used to check many more cases, but
3484 they are now checked elsewhere. */
3485 if (GET_CODE (XEXP (x, 0)) == PLUS
3486 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3487 return gen_binary (PLUS, mode,
3488 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3489 XEXP (x, 1)),
3490 XEXP (XEXP (x, 0), 1));
3491
3492 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3493 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3494 bit-field and can be replaced by either a sign_extend or a
3495 sign_extract. The `and' may be a zero_extend. */
3496 if (GET_CODE (XEXP (x, 0)) == XOR
3497 && GET_CODE (XEXP (x, 1)) == CONST_INT
3498 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3499 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3500 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5f4f0e22 3501 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
3502 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3503 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3504 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 3505 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
230d793d
RS
3506 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3507 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3508 == i + 1))))
3509 {
3510 x = simplify_shift_const
5f4f0e22
CH
3511 (NULL_RTX, ASHIFTRT, mode,
3512 simplify_shift_const (NULL_RTX, ASHIFT, mode,
230d793d
RS
3513 XEXP (XEXP (XEXP (x, 0), 0), 0),
3514 GET_MODE_BITSIZE (mode) - (i + 1)),
3515 GET_MODE_BITSIZE (mode) - (i + 1));
3516 goto restart;
3517 }
3518
bc0776c6
RK
3519 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3520 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3521 is 1. This produces better code than the alternative immediately
3522 below. */
3523 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3524 && reversible_comparison_p (XEXP (x, 0))
3525 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3526 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3527 {
3528 x = gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3529 mode, XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1));
3530 x = gen_unary (NEG, mode, x);
3531 goto restart;
3532 }
3533
3534 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
230d793d
RS
3535 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3536 the bitsize of the mode - 1. This allows simplification of
3537 "a = (b & 8) == 0;" */
3538 if (XEXP (x, 1) == constm1_rtx
3539 && GET_CODE (XEXP (x, 0)) != REG
3540 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3541 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 3542 && nonzero_bits (XEXP (x, 0), mode) == 1)
230d793d
RS
3543 {
3544 x = simplify_shift_const
5f4f0e22
CH
3545 (NULL_RTX, ASHIFTRT, mode,
3546 simplify_shift_const (NULL_RTX, ASHIFT, mode,
230d793d
RS
3547 gen_rtx_combine (XOR, mode,
3548 XEXP (x, 0), const1_rtx),
3549 GET_MODE_BITSIZE (mode) - 1),
3550 GET_MODE_BITSIZE (mode) - 1);
3551 goto restart;
3552 }
02f4ada4
RK
3553
3554 /* If we are adding two things that have no bits in common, convert
3555 the addition into an IOR. This will often be further simplified,
3556 for example in cases like ((a & 1) + (a & 2)), which can
3557 become a & 3. */
3558
ac49a949 3559 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af
RK
3560 && (nonzero_bits (XEXP (x, 0), mode)
3561 & nonzero_bits (XEXP (x, 1), mode)) == 0)
02f4ada4
RK
3562 {
3563 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3564 goto restart;
3565 }
230d793d
RS
3566 break;
3567
3568 case MINUS:
5109d49f
RK
3569#if STORE_FLAG_VALUE == 1
3570 /* (minus 1 (comparison foo bar)) can be done by reversing the comparison
3571 code if valid. */
3572 if (XEXP (x, 0) == const1_rtx
3573 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3574 && reversible_comparison_p (XEXP (x, 1)))
3575 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3576 mode, XEXP (XEXP (x, 1), 0),
3577 XEXP (XEXP (x, 1), 1));
3578#endif
3579
230d793d
RS
3580 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3581 (and <foo> (const_int pow2-1)) */
3582 if (GET_CODE (XEXP (x, 1)) == AND
3583 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3584 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3585 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3586 {
5f4f0e22 3587 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
230d793d
RS
3588 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3589 goto restart;
3590 }
7bef8680
RK
3591
3592 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3593 integers. */
3594 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
3595 {
3596 x = gen_binary (MINUS, mode,
3597 gen_binary (MINUS, mode, XEXP (x, 0),
3598 XEXP (XEXP (x, 1), 0)),
3599 XEXP (XEXP (x, 1), 1));
3600 goto restart;
3601 }
230d793d
RS
3602 break;
3603
3604 case MULT:
3605 /* If we have (mult (plus A B) C), apply the distributive law and then
3606 the inverse distributive law to see if things simplify. This
3607 occurs mostly in addresses, often when unrolling loops. */
3608
3609 if (GET_CODE (XEXP (x, 0)) == PLUS)
3610 {
3611 x = apply_distributive_law
3612 (gen_binary (PLUS, mode,
3613 gen_binary (MULT, mode,
3614 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3615 gen_binary (MULT, mode,
3616 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3617
3618 if (GET_CODE (x) != MULT)
3619 goto restart;
3620 }
3621
3622 /* If this is multiplication by a power of two and its first operand is
3623 a shift, treat the multiply as a shift to allow the shifts to
3624 possibly combine. */
3625 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3626 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3627 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3628 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3629 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3630 || GET_CODE (XEXP (x, 0)) == ROTATE
3631 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3632 {
5f4f0e22 3633 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
230d793d
RS
3634 goto restart;
3635 }
3636
3637 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3638 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3639 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3640 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3641 XEXP (XEXP (x, 0), 1));
3642 break;
3643
3644 case UDIV:
3645 /* If this is a divide by a power of two, treat it as a shift if
3646 its first operand is a shift. */
3647 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3648 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3649 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3650 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3651 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3652 || GET_CODE (XEXP (x, 0)) == ROTATE
3653 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3654 {
5f4f0e22 3655 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
230d793d
RS
3656 goto restart;
3657 }
3658 break;
3659
3660 case EQ: case NE:
3661 case GT: case GTU: case GE: case GEU:
3662 case LT: case LTU: case LE: case LEU:
3663 /* If the first operand is a condition code, we can't do anything
3664 with it. */
3665 if (GET_CODE (XEXP (x, 0)) == COMPARE
3666 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3667#ifdef HAVE_cc0
3668 && XEXP (x, 0) != cc0_rtx
3669#endif
3670 ))
3671 {
3672 rtx op0 = XEXP (x, 0);
3673 rtx op1 = XEXP (x, 1);
3674 enum rtx_code new_code;
3675
3676 if (GET_CODE (op0) == COMPARE)
3677 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3678
3679 /* Simplify our comparison, if possible. */
3680 new_code = simplify_comparison (code, &op0, &op1);
3681
3682#if STORE_FLAG_VALUE == 1
3683 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 3684 if only the low-order bit is possibly nonzero in X (such as when
5109d49f
RK
3685 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3686 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3687 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3688 (plus X 1).
3689
3690 Remove any ZERO_EXTRACT we made when thinking this was a
3691 comparison. It may now be simpler to use, e.g., an AND. If a
3692 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3693 the call to make_compound_operation in the SET case. */
3694
3f508eca 3695 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3696 && op1 == const0_rtx
5109d49f 3697 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
3698 return gen_lowpart_for_combine (mode,
3699 expand_compound_operation (op0));
5109d49f
RK
3700
3701 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3702 && op1 == const0_rtx
3703 && (num_sign_bit_copies (op0, mode)
3704 == GET_MODE_BITSIZE (mode)))
3705 {
3706 op0 = expand_compound_operation (op0);
3707 x = gen_unary (NEG, mode, gen_lowpart_for_combine (mode, op0));
3708 goto restart;
3709 }
3710
3f508eca 3711 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3712 && op1 == const0_rtx
5109d49f 3713 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
3714 {
3715 op0 = expand_compound_operation (op0);
5109d49f
RK
3716 x = gen_binary (XOR, mode,
3717 gen_lowpart_for_combine (mode, op0),
3718 const1_rtx);
3719 goto restart;
3720 }
818b11b9 3721
5109d49f
RK
3722 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3723 && op1 == const0_rtx
3724 && (num_sign_bit_copies (op0, mode)
3725 == GET_MODE_BITSIZE (mode)))
3726 {
3727 op0 = expand_compound_operation (op0);
3728 x = plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9
RK
3729 goto restart;
3730 }
230d793d
RS
3731#endif
3732
3733#if STORE_FLAG_VALUE == -1
5109d49f
RK
3734 /* If STORE_FLAG_VALUE is -1, we have cases similar to
3735 those above. */
3f508eca 3736 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3737 && op1 == const0_rtx
5109d49f
RK
3738 && (num_sign_bit_copies (op0, mode)
3739 == GET_MODE_BITSIZE (mode)))
3740 return gen_lowpart_for_combine (mode,
3741 expand_compound_operation (op0));
3742
3743 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3744 && op1 == const0_rtx
3745 && nonzero_bits (op0, mode) == 1)
3746 {
3747 op0 = expand_compound_operation (op0);
3748 x = gen_unary (NEG, mode, gen_lowpart_for_combine (mode, op0));
3749 goto restart;
3750 }
3751
3752 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3753 && op1 == const0_rtx
3754 && (num_sign_bit_copies (op0, mode)
3755 == GET_MODE_BITSIZE (mode)))
230d793d 3756 {
818b11b9 3757 op0 = expand_compound_operation (op0);
5109d49f
RK
3758 x = gen_unary (NOT, mode, gen_lowpart_for_combine (mode, op0));
3759 goto restart;
3760 }
3761
3762 /* If X is 0/1, (eq X 0) is X-1. */
3763 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3764 && op1 == const0_rtx
3765 && nonzero_bits (op0, mode) == 1)
3766 {
3767 op0 = expand_compound_operation (op0);
3768 x = plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d
RS
3769 goto restart;
3770 }
3771#endif
3772
3773 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
951553af
RK
3774 one bit that might be nonzero, we can convert (ne x 0) to
3775 (ashift x c) where C puts the bit in the sign bit. Remove any
3776 AND with STORE_FLAG_VALUE when we are done, since we are only
3777 going to test the sign bit. */
3f508eca 3778 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22
CH
3779 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3780 && (STORE_FLAG_VALUE
3781 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
230d793d
RS
3782 && op1 == const0_rtx
3783 && mode == GET_MODE (op0)
5109d49f 3784 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 3785 {
818b11b9
RK
3786 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3787 expand_compound_operation (op0),
230d793d
RS
3788 GET_MODE_BITSIZE (mode) - 1 - i);
3789 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3790 return XEXP (x, 0);
3791 else
3792 return x;
3793 }
3794
3795 /* If the code changed, return a whole new comparison. */
3796 if (new_code != code)
3797 return gen_rtx_combine (new_code, mode, op0, op1);
3798
3799 /* Otherwise, keep this operation, but maybe change its operands.
3800 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3801 SUBST (XEXP (x, 0), op0);
3802 SUBST (XEXP (x, 1), op1);
3803 }
3804 break;
3805
3806 case IF_THEN_ELSE:
1a26b032
RK
3807 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3808 used in it is being compared against certain values. Get the
3809 true and false comparisons and see if that says anything about the
3810 value of each arm. */
d0ab8cd3 3811
1a26b032
RK
3812 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3813 && reversible_comparison_p (XEXP (x, 0))
d0ab8cd3
RK
3814 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3815 {
951553af 3816 HOST_WIDE_INT nzb;
d0ab8cd3 3817 rtx from = XEXP (XEXP (x, 0), 0);
1a26b032
RK
3818 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3819 enum rtx_code false_code = reverse_condition (true_code);
3820 rtx true_val = XEXP (XEXP (x, 0), 1);
3821 rtx false_val = true_val;
3822 rtx true_arm = XEXP (x, 1);
3823 rtx false_arm = XEXP (x, 2);
3824 int swapped = 0;
3825
3826 /* If FALSE_CODE is EQ, swap the codes and arms. */
3827
3828 if (false_code == EQ)
3829 {
3830 swapped = 1, true_code = EQ, false_code = NE;
3831 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3832 }
d0ab8cd3 3833
1a26b032 3834 /* If we are comparing against zero and the expression being tested
951553af
RK
3835 has only a single bit that might be nonzero, that is its value
3836 when it is not equal to zero. Similarly if it is known to be
3837 -1 or 0. */
d0ab8cd3 3838
1a26b032 3839 if (true_code == EQ && true_val == const0_rtx
951553af
RK
3840 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3841 false_code = EQ, false_val = GEN_INT (nzb);
1a26b032 3842 else if (true_code == EQ && true_val == const0_rtx
d0ab8cd3
RK
3843 && (num_sign_bit_copies (from, GET_MODE (from))
3844 == GET_MODE_BITSIZE (GET_MODE (from))))
1a26b032 3845 false_code = EQ, false_val = constm1_rtx;
d0ab8cd3
RK
3846
3847 /* Now simplify an arm if we know the value of the register
3848 in the branch and it is used in the arm. Be carefull due to
3849 the potential of locally-shared RTL. */
3850
1a26b032
RK
3851 if (reg_mentioned_p (from, true_arm))
3852 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3853 from, true_val),
3854 pc_rtx, pc_rtx, 0, 0);
3855 if (reg_mentioned_p (from, false_arm))
3856 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3857 from, false_val),
3858 pc_rtx, pc_rtx, 0, 0);
3859
3860 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3861 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
d0ab8cd3
RK
3862 }
3863
230d793d
RS
3864 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3865 reversed, do so to avoid needing two sets of patterns for
d0ab8cd3 3866 subtract-and-branch insns. Similarly if we have a constant in that
1a26b032
RK
3867 position or if the third operand is the same as the first operand
3868 of the comparison. */
3869
3870 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3871 && reversible_comparison_p (XEXP (x, 0))
3872 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3873 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
230d793d
RS
3874 {
3875 SUBST (XEXP (x, 0),
d0ab8cd3
RK
3876 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3877 GET_MODE (XEXP (x, 0)),
3878 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3879
3880 temp = XEXP (x, 1);
230d793d 3881 SUBST (XEXP (x, 1), XEXP (x, 2));
d0ab8cd3 3882 SUBST (XEXP (x, 2), temp);
230d793d 3883 }
1a26b032
RK
3884
3885 /* If the two arms are identical, we don't need the comparison. */
3886
3887 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3888 && ! side_effects_p (XEXP (x, 0)))
3889 return XEXP (x, 1);
3890
3891 /* Look for cases where we have (abs x) or (neg (abs X)). */
3892
3893 if (GET_MODE_CLASS (mode) == MODE_INT
3894 && GET_CODE (XEXP (x, 2)) == NEG
3895 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3896 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3897 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3898 && ! side_effects_p (XEXP (x, 1)))
3899 switch (GET_CODE (XEXP (x, 0)))
3900 {
3901 case GT:
3902 case GE:
3903 x = gen_unary (ABS, mode, XEXP (x, 1));
3904 goto restart;
3905 case LT:
3906 case LE:
3907 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3908 goto restart;
3909 }
3910
3911 /* Look for MIN or MAX. */
3912
3ad2180a 3913 if (! FLOAT_MODE_P (mode)
1a26b032
RK
3914 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3915 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3916 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3917 && ! side_effects_p (XEXP (x, 0)))
3918 switch (GET_CODE (XEXP (x, 0)))
3919 {
3920 case GE:
3921 case GT:
3922 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3923 goto restart;
3924 case LE:
3925 case LT:
3926 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3927 goto restart;
3928 case GEU:
3929 case GTU:
3930 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3931 goto restart;
3932 case LEU:
3933 case LTU:
3934 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3935 goto restart;
3936 }
3937
5109d49f
RK
3938#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
3939
3940 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when
3941 its second operand is zero, this can be done as (OP Z (mult COND C2))
3942 where C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer
3943 ZERO_EXTEND or SIGN_EXTEND as long as Z is already extended (so
3944 we don't destroy it). We can do this kind of thing in some
3945 cases when STORE_FLAG_VALUE is neither of the above, but it isn't
3946 worth checking for. */
3947
3948 if (mode != VOIDmode && ! side_effects_p (x))
1a26b032 3949 {
5109d49f
RK
3950 rtx t = make_compound_operation (XEXP (x, 1), SET);
3951 rtx f = make_compound_operation (XEXP (x, 2), SET);
3952 rtx cond_op0 = XEXP (XEXP (x, 0), 0);
3953 rtx cond_op1 = XEXP (XEXP (x, 0), 1);
3954 enum rtx_code cond_op = GET_CODE (XEXP (x, 0));
3955 enum rtx_code op, extend_op = NIL;
1a26b032 3956 enum machine_mode m = mode;
5109d49f
RK
3957 rtx z = 0, c1, c2;
3958
3959 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
3960 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
3961 || GET_CODE (t) == ASHIFT
3962 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
3963 && rtx_equal_p (XEXP (t, 0), f))
3964 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
3965 else if (GET_CODE (t) == SIGN_EXTEND
3966 && (GET_CODE (XEXP (t, 0)) == PLUS
3967 || GET_CODE (XEXP (t, 0)) == MINUS
3968 || GET_CODE (XEXP (t, 0)) == IOR
3969 || GET_CODE (XEXP (t, 0)) == XOR
3970 || GET_CODE (XEXP (t, 0)) == ASHIFT
3971 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
3972 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
3973 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
3974 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
3975 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
3976 && (num_sign_bit_copies (f, GET_MODE (f))
3977 > (GET_MODE_BITSIZE (mode)
3978 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
1a26b032 3979 {
5109d49f 3980 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
1a26b032 3981 extend_op = SIGN_EXTEND;
5109d49f 3982 m = GET_MODE (XEXP (t, 0));
1a26b032 3983 }
5109d49f
RK
3984 else if (GET_CODE (t) == ZERO_EXTEND
3985 && (GET_CODE (XEXP (t, 0)) == PLUS
3986 || GET_CODE (XEXP (t, 0)) == MINUS
3987 || GET_CODE (XEXP (t, 0)) == IOR
3988 || GET_CODE (XEXP (t, 0)) == XOR
3989 || GET_CODE (XEXP (t, 0)) == ASHIFT
3990 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
3991 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
3992 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
1a26b032 3993 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5109d49f
RK
3994 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
3995 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
3996 && ((nonzero_bits (f, GET_MODE (f))
3997 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
1a26b032
RK
3998 == 0))
3999 {
5109d49f 4000 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
1a26b032 4001 extend_op = ZERO_EXTEND;
5109d49f 4002 m = GET_MODE (XEXP (t, 0));
1a26b032
RK
4003 }
4004
5109d49f
RK
4005 if (reversible_comparison_p (XEXP (x, 0))
4006 && (GET_CODE (f) == PLUS || GET_CODE (f) == MINUS
4007 || GET_CODE (f) == IOR || GET_CODE (f) == XOR
4008 || GET_CODE (f) == ASHIFT
4009 || GET_CODE (f) == LSHIFTRT || GET_CODE (f) == ASHIFTRT)
4010 && rtx_equal_p (XEXP (f, 0), t))
4011 {
4012 c1 = XEXP (f, 1), op = GET_CODE (f), z = t;
4013 cond_op = reverse_condition (cond_op);
4014 }
4015 else if (GET_CODE (f) == SIGN_EXTEND
4016 && (GET_CODE (XEXP (f, 0)) == PLUS
4017 || GET_CODE (XEXP (f, 0)) == MINUS
4018 || GET_CODE (XEXP (f, 0)) == IOR
4019 || GET_CODE (XEXP (f, 0)) == XOR
4020 || GET_CODE (XEXP (f, 0)) == ASHIFT
4021 || GET_CODE (XEXP (f, 0)) == LSHIFTRT
4022 || GET_CODE (XEXP (f, 0)) == ASHIFTRT)
4023 && GET_CODE (XEXP (XEXP (f, 0), 0)) == SUBREG
4024 && subreg_lowpart_p (XEXP (XEXP (f, 0), 0))
4025 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (f, 0), 0)), f)
4026 && (num_sign_bit_copies (t, GET_MODE (t))
4027 > (GET_MODE_BITSIZE (mode)
4028 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (f, 0), 0))))))
4029 {
4030 c1 = XEXP (XEXP (f, 0), 1); z = t; op = GET_CODE (XEXP (f, 0));
4031 extend_op = SIGN_EXTEND;
4032 m = GET_MODE (XEXP (f, 0));
4033 cond_op = reverse_condition (cond_op);
4034 }
4035 else if (GET_CODE (f) == ZERO_EXTEND
4036 && (GET_CODE (XEXP (f, 0)) == PLUS
4037 || GET_CODE (XEXP (f, 0)) == MINUS
4038 || GET_CODE (XEXP (f, 0)) == IOR
4039 || GET_CODE (XEXP (f, 0)) == XOR
4040 || GET_CODE (XEXP (f, 0)) == ASHIFT
4041 || GET_CODE (XEXP (f, 0)) == LSHIFTRT
4042 || GET_CODE (XEXP (f, 0)) == ASHIFTRT)
4043 && GET_CODE (XEXP (XEXP (f, 0), 0)) == SUBREG
4044 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4045 && subreg_lowpart_p (XEXP (XEXP (f, 0), 0))
4046 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (f, 0), 0)), t)
4047 && ((nonzero_bits (t, GET_MODE (t))
4048 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (f, 0), 0))))
4049 == 0))
1a26b032 4050 {
5109d49f
RK
4051 c1 = XEXP (XEXP (f, 0), 1); z = t; op = GET_CODE (XEXP (f, 0));
4052 extend_op = ZERO_EXTEND;
4053 m = GET_MODE (XEXP (f, 0));
4054 cond_op = reverse_condition (cond_op);
4055 }
4056
4057 if (z)
4058 {
4059 temp = subst (gen_binary (cond_op, m, cond_op0, cond_op1),
4060 pc_rtx, pc_rtx, 0, 0);
4061
4062
4063 temp = gen_binary (MULT, m, temp,
4064 gen_binary (MULT, m, c1,
4065 GEN_INT (STORE_FLAG_VALUE)));
1a26b032 4066
abe6e52f
RK
4067 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4068
1a26b032
RK
4069 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4070
5109d49f 4071 if (extend_op != NIL)
1a26b032
RK
4072 temp = gen_unary (extend_op, mode, temp);
4073
4074 return temp;
4075 }
4076 }
5109d49f 4077#endif
224eeff2
RK
4078
4079 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to
4080 be 0 or 1 and C1 is a single bit or A is known to be 0 or -1 and
4081 C1 is the negation of a single bit, we can convert this operation
4082 to a shift. We can actually do this in more general cases, but it
4083 doesn't seem worth it. */
4084
4085 if (GET_CODE (XEXP (x, 0)) == NE && XEXP (XEXP (x, 0), 1) == const0_rtx
4086 && XEXP (x, 2) == const0_rtx && GET_CODE (XEXP (x, 1)) == CONST_INT
3e61c219 4087 && ((1 == nonzero_bits (XEXP (XEXP (x, 0), 0), mode)
224eeff2 4088 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
3e61c219
RK
4089 || ((num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
4090 == GET_MODE_BITSIZE (mode))
224eeff2
RK
4091 && (i = exact_log2 (- INTVAL (XEXP (x, 1)))) >= 0)))
4092 return
4093 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4094 gen_lowpart_for_combine (mode,
4095 XEXP (XEXP (x, 0), 0)),
4096 i);
230d793d
RS
4097 break;
4098
4099 case ZERO_EXTRACT:
4100 case SIGN_EXTRACT:
4101 case ZERO_EXTEND:
4102 case SIGN_EXTEND:
4103 /* If we are processing SET_DEST, we are done. */
4104 if (in_dest)
4105 return x;
4106
4107 x = expand_compound_operation (x);
4108 if (GET_CODE (x) != code)
4109 goto restart;
4110 break;
4111
4112 case SET:
4113 /* (set (pc) (return)) gets written as (return). */
4114 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
4115 return SET_SRC (x);
4116
4117 /* Convert this into a field assignment operation, if possible. */
4118 x = make_field_assignment (x);
4119
230d793d
RS
4120 /* If we are setting CC0 or if the source is a COMPARE, look for the
4121 use of the comparison result and try to simplify it unless we already
4122 have used undobuf.other_insn. */
4123 if ((GET_CODE (SET_SRC (x)) == COMPARE
4124#ifdef HAVE_cc0
4125 || SET_DEST (x) == cc0_rtx
4126#endif
4127 )
4128 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
4129 &other_insn)) != 0
4130 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4131 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4132 && XEXP (*cc_use, 0) == SET_DEST (x))
4133 {
4134 enum rtx_code old_code = GET_CODE (*cc_use);
4135 enum rtx_code new_code;
4136 rtx op0, op1;
4137 int other_changed = 0;
4138 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
4139
4140 if (GET_CODE (SET_SRC (x)) == COMPARE)
4141 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
4142 else
4143 op0 = SET_SRC (x), op1 = const0_rtx;
4144
4145 /* Simplify our comparison, if possible. */
4146 new_code = simplify_comparison (old_code, &op0, &op1);
4147
c141a106 4148#ifdef EXTRA_CC_MODES
230d793d
RS
4149 /* If this machine has CC modes other than CCmode, check to see
4150 if we need to use a different CC mode here. */
77fa0940 4151 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
c141a106 4152#endif /* EXTRA_CC_MODES */
230d793d 4153
c141a106 4154#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
230d793d
RS
4155 /* If the mode changed, we have to change SET_DEST, the mode
4156 in the compare, and the mode in the place SET_DEST is used.
4157 If SET_DEST is a hard register, just build new versions with
4158 the proper mode. If it is a pseudo, we lose unless it is only
4159 time we set the pseudo, in which case we can safely change
4160 its mode. */
4161 if (compare_mode != GET_MODE (SET_DEST (x)))
4162 {
4163 int regno = REGNO (SET_DEST (x));
4164 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4165
4166 if (regno < FIRST_PSEUDO_REGISTER
4167 || (reg_n_sets[regno] == 1
4168 && ! REG_USERVAR_P (SET_DEST (x))))
4169 {
4170 if (regno >= FIRST_PSEUDO_REGISTER)
4171 SUBST (regno_reg_rtx[regno], new_dest);
4172
4173 SUBST (SET_DEST (x), new_dest);
4174 SUBST (XEXP (*cc_use, 0), new_dest);
4175 other_changed = 1;
4176 }
4177 }
4178#endif
4179
4180 /* If the code changed, we have to build a new comparison
4181 in undobuf.other_insn. */
4182 if (new_code != old_code)
4183 {
951553af 4184 unsigned HOST_WIDE_INT mask;
230d793d
RS
4185
4186 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4187 SET_DEST (x), const0_rtx));
4188
4189 /* If the only change we made was to change an EQ into an
951553af 4190 NE or vice versa, OP0 has only one bit that might be nonzero,
230d793d
RS
4191 and OP1 is zero, check if changing the user of the condition
4192 code will produce a valid insn. If it won't, we can keep
4193 the original code in that insn by surrounding our operation
4194 with an XOR. */
4195
4196 if (((old_code == NE && new_code == EQ)
4197 || (old_code == EQ && new_code == NE))
4198 && ! other_changed && op1 == const0_rtx
5f4f0e22
CH
4199 && (GET_MODE_BITSIZE (GET_MODE (op0))
4200 <= HOST_BITS_PER_WIDE_INT)
951553af 4201 && (exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0)))
230d793d
RS
4202 >= 0))
4203 {
4204 rtx pat = PATTERN (other_insn), note = 0;
4205
6e2a4e3c 4206 if ((recog_for_combine (&pat, other_insn, &note) < 0
230d793d
RS
4207 && ! check_asm_operands (pat)))
4208 {
4209 PUT_CODE (*cc_use, old_code);
4210 other_insn = 0;
4211
4212 op0 = gen_binary (XOR, GET_MODE (op0), op0,
5f4f0e22 4213 GEN_INT (mask));
230d793d
RS
4214 }
4215 }
4216
4217 other_changed = 1;
4218 }
4219
4220 if (other_changed)
4221 undobuf.other_insn = other_insn;
4222
4223#ifdef HAVE_cc0
4224 /* If we are now comparing against zero, change our source if
4225 needed. If we do not use cc0, we always have a COMPARE. */
4226 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
4227 SUBST (SET_SRC (x), op0);
4228 else
4229#endif
4230
4231 /* Otherwise, if we didn't previously have a COMPARE in the
4232 correct mode, we need one. */
4233 if (GET_CODE (SET_SRC (x)) != COMPARE
4234 || GET_MODE (SET_SRC (x)) != compare_mode)
4235 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
4236 op0, op1));
4237 else
4238 {
4239 /* Otherwise, update the COMPARE if needed. */
4240 SUBST (XEXP (SET_SRC (x), 0), op0);
4241 SUBST (XEXP (SET_SRC (x), 1), op1);
4242 }
4243 }
4244 else
4245 {
4246 /* Get SET_SRC in a form where we have placed back any
4247 compound expressions. Then do the checks below. */
4248 temp = make_compound_operation (SET_SRC (x), SET);
4249 SUBST (SET_SRC (x), temp);
4250 }
4251
df62f951
RK
4252 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
4253 operation, and X being a REG or (subreg (reg)), we may be able to
4254 convert this to (set (subreg:m2 x) (op)).
4255
4256 We can always do this if M1 is narrower than M2 because that
4257 means that we only care about the low bits of the result.
4258
8baf60bb
RK
4259 However, on machines without WORD_REGISTER_OPERATIONS defined,
4260 we cannot perform a narrower operation that requested since the
4261 high-order bits will be undefined. On machine where it is defined,
4262 this transformation is safe as long as M1 and M2 have the same
4263 number of words. */
df62f951
RK
4264
4265 if (GET_CODE (SET_SRC (x)) == SUBREG
4266 && subreg_lowpart_p (SET_SRC (x))
4267 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
4268 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
4269 / UNITS_PER_WORD)
4270 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
4271 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
8baf60bb 4272#ifndef WORD_REGISTER_OPERATIONS
df62f951
RK
4273 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
4274 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
4275#endif
4276 && (GET_CODE (SET_DEST (x)) == REG
4277 || (GET_CODE (SET_DEST (x)) == SUBREG
4278 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
4279 {
df62f951 4280 SUBST (SET_DEST (x),
d0ab8cd3
RK
4281 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
4282 SET_DEST (x)));
df62f951
RK
4283 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
4284 }
4285
8baf60bb 4286#ifdef LOAD_EXTEND_OP
230d793d
RS
4287 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
4288 M wider than N, this would require a paradoxical subreg.
4289 Replace the subreg with a zero_extend to avoid the reload that
4290 would otherwise be required. */
c6dc70d6 4291
230d793d 4292 if (GET_CODE (SET_SRC (x)) == SUBREG
8baf60bb 4293 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (SET_SRC (x)))) != NIL
230d793d
RS
4294 && subreg_lowpart_p (SET_SRC (x))
4295 && SUBREG_WORD (SET_SRC (x)) == 0
4296 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
4297 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
4298 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
8baf60bb
RK
4299 SUBST (SET_SRC (x),
4300 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE
4301 (SUBREG_REG (SET_SRC (x)))),
4302 GET_MODE (SET_SRC (x)),
4303 XEXP (SET_SRC (x), 0)));
230d793d
RS
4304#endif
4305
1a26b032
RK
4306#ifndef HAVE_conditional_move
4307
4308 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
4309 and we are comparing an item known to be 0 or -1 against 0, use a
4310 logical operation instead. Check for one of the arms being an IOR
4311 of the other arm with some value. We compute three terms to be
4312 IOR'ed together. In practice, at most two will be nonzero. Then
4313 we do the IOR's. */
4314
696223d7
TW
4315 if (GET_CODE (SET_DEST (x)) != PC
4316 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
1a26b032
RK
4317 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
4318 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
4319 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
4320 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
4321 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
4322 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
4323 && ! side_effects_p (SET_SRC (x)))
4324 {
4325 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
4326 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
4327 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
4328 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
4329 rtx term1 = const0_rtx, term2, term3;
4330
4331 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4332 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4333 else if (GET_CODE (true) == IOR
4334 && rtx_equal_p (XEXP (true, 1), false))
4335 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4336 else if (GET_CODE (false) == IOR
4337 && rtx_equal_p (XEXP (false, 0), true))
4338 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4339 else if (GET_CODE (false) == IOR
4340 && rtx_equal_p (XEXP (false, 1), true))
4341 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4342
4343 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4344 XEXP (XEXP (SET_SRC (x), 0), 0), true);
4345 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4346 gen_unary (NOT, GET_MODE (SET_SRC (x)),
4347 XEXP (XEXP (SET_SRC (x), 0), 0)),
4348 false);
4349
4350 SUBST (SET_SRC (x),
4351 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4352 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4353 term1, term2),
4354 term3));
4355 }
4356#endif
230d793d
RS
4357 break;
4358
4359 case AND:
4360 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4361 {
4362 x = simplify_and_const_int (x, mode, XEXP (x, 0),
4363 INTVAL (XEXP (x, 1)));
4364
4365 /* If we have (ior (and (X C1) C2)) and the next restart would be
4366 the last, simplify this by making C1 as small as possible
4367 and then exit. */
4368 if (n_restarts >= 3 && GET_CODE (x) == IOR
4369 && GET_CODE (XEXP (x, 0)) == AND
4370 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4371 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4372 {
4373 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
5f4f0e22
CH
4374 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
4375 & ~ INTVAL (XEXP (x, 1))));
230d793d
RS
4376 return gen_binary (IOR, mode, temp, XEXP (x, 1));
4377 }
4378
4379 if (GET_CODE (x) != AND)
4380 goto restart;
4381 }
4382
4383 /* Convert (A | B) & A to A. */
4384 if (GET_CODE (XEXP (x, 0)) == IOR
4385 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4386 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4387 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4388 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4389 return XEXP (x, 1);
4390
4391 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4392 insn (and may simplify more). */
4393 else if (GET_CODE (XEXP (x, 0)) == XOR
4394 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4395 && ! side_effects_p (XEXP (x, 1)))
4396 {
4397 x = gen_binary (AND, mode,
4398 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4399 XEXP (x, 1));
4400 goto restart;
4401 }
4402 else if (GET_CODE (XEXP (x, 0)) == XOR
4403 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4404 && ! side_effects_p (XEXP (x, 1)))
4405 {
4406 x = gen_binary (AND, mode,
4407 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4408 XEXP (x, 1));
4409 goto restart;
4410 }
4411
4412 /* Similarly for (~ (A ^ B)) & A. */
4413 else if (GET_CODE (XEXP (x, 0)) == NOT
4414 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4415 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
4416 && ! side_effects_p (XEXP (x, 1)))
4417 {
4418 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
4419 XEXP (x, 1));
4420 goto restart;
4421 }
4422 else if (GET_CODE (XEXP (x, 0)) == NOT
4423 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4424 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
4425 && ! side_effects_p (XEXP (x, 1)))
4426 {
4427 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
4428 XEXP (x, 1));
4429 goto restart;
4430 }
4431
d0ab8cd3 4432 /* In the following group of tests (and those in case IOR below),
230d793d
RS
4433 we start with some combination of logical operations and apply
4434 the distributive law followed by the inverse distributive law.
4435 Most of the time, this results in no change. However, if some of
4436 the operands are the same or inverses of each other, simplifications
4437 will result.
4438
4439 For example, (and (ior A B) (not B)) can occur as the result of
4440 expanding a bit field assignment. When we apply the distributive
4441 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
4442 which then simplifies to (and (A (not B))). */
4443
4444 /* If we have (and (ior A B) C), apply the distributive law and then
4445 the inverse distributive law to see if things simplify. */
4446
4447 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4448 {
4449 x = apply_distributive_law
4450 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4451 gen_binary (AND, mode,
4452 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4453 gen_binary (AND, mode,
4454 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4455 if (GET_CODE (x) != AND)
4456 goto restart;
4457 }
4458
4459 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4460 {
4461 x = apply_distributive_law
4462 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4463 gen_binary (AND, mode,
4464 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4465 gen_binary (AND, mode,
4466 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4467 if (GET_CODE (x) != AND)
4468 goto restart;
4469 }
4470
4471 /* Similarly, taking advantage of the fact that
4472 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4473
4474 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4475 {
4476 x = apply_distributive_law
4477 (gen_binary (XOR, mode,
4478 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4479 XEXP (XEXP (x, 1), 0)),
4480 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4481 XEXP (XEXP (x, 1), 1))));
4482 if (GET_CODE (x) != AND)
4483 goto restart;
4484 }
4485
4486 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4487 {
4488 x = apply_distributive_law
4489 (gen_binary (XOR, mode,
4490 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4491 XEXP (XEXP (x, 0), 0)),
4492 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4493 XEXP (XEXP (x, 0), 1))));
4494 if (GET_CODE (x) != AND)
4495 goto restart;
4496 }
4497 break;
4498
4499 case IOR:
951553af 4500 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
d0ab8cd3 4501 if (GET_CODE (XEXP (x, 1)) == CONST_INT
ac49a949 4502 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af 4503 && (nonzero_bits (XEXP (x, 0), mode) & ~ INTVAL (XEXP (x, 1))) == 0)
d0ab8cd3
RK
4504 return XEXP (x, 1);
4505
230d793d
RS
4506 /* Convert (A & B) | A to A. */
4507 if (GET_CODE (XEXP (x, 0)) == AND
4508 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4509 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4510 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4511 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4512 return XEXP (x, 1);
4513
4514 /* If we have (ior (and A B) C), apply the distributive law and then
4515 the inverse distributive law to see if things simplify. */
4516
4517 if (GET_CODE (XEXP (x, 0)) == AND)
4518 {
4519 x = apply_distributive_law
4520 (gen_binary (AND, mode,
4521 gen_binary (IOR, mode,
4522 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4523 gen_binary (IOR, mode,
4524 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4525
4526 if (GET_CODE (x) != IOR)
4527 goto restart;
4528 }
4529
4530 if (GET_CODE (XEXP (x, 1)) == AND)
4531 {
4532 x = apply_distributive_law
4533 (gen_binary (AND, mode,
4534 gen_binary (IOR, mode,
4535 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4536 gen_binary (IOR, mode,
4537 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4538
4539 if (GET_CODE (x) != IOR)
4540 goto restart;
4541 }
4542
4543 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4544 mode size to (rotate A CX). */
4545
4546 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4547 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4548 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4549 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4550 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4551 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4552 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4553 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4554 == GET_MODE_BITSIZE (mode)))
4555 {
4556 rtx shift_count;
4557
4558 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4559 shift_count = XEXP (XEXP (x, 0), 1);
4560 else
4561 shift_count = XEXP (XEXP (x, 1), 1);
4562 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4563 goto restart;
4564 }
4565 break;
4566
4567 case XOR:
4568 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4569 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4570 (NOT y). */
4571 {
4572 int num_negated = 0;
4573 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4574
4575 if (GET_CODE (in1) == NOT)
4576 num_negated++, in1 = XEXP (in1, 0);
4577 if (GET_CODE (in2) == NOT)
4578 num_negated++, in2 = XEXP (in2, 0);
4579
4580 if (num_negated == 2)
4581 {
4582 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4583 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4584 }
4585 else if (num_negated == 1)
d0ab8cd3
RK
4586 {
4587 x = gen_unary (NOT, mode,
4588 gen_binary (XOR, mode, in1, in2));
4589 goto restart;
4590 }
230d793d
RS
4591 }
4592
4593 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4594 correspond to a machine insn or result in further simplifications
4595 if B is a constant. */
4596
4597 if (GET_CODE (XEXP (x, 0)) == AND
4598 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4599 && ! side_effects_p (XEXP (x, 1)))
4600 {
4601 x = gen_binary (AND, mode,
4602 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4603 XEXP (x, 1));
4604 goto restart;
4605 }
4606 else if (GET_CODE (XEXP (x, 0)) == AND
4607 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4608 && ! side_effects_p (XEXP (x, 1)))
4609 {
4610 x = gen_binary (AND, mode,
4611 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4612 XEXP (x, 1));
4613 goto restart;
4614 }
4615
4616
4617#if STORE_FLAG_VALUE == 1
4618 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4619 comparison. */
4620 if (XEXP (x, 1) == const1_rtx
4621 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4622 && reversible_comparison_p (XEXP (x, 0)))
4623 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4624 mode, XEXP (XEXP (x, 0), 0),
4625 XEXP (XEXP (x, 0), 1));
500c518b
RK
4626
4627 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4628 is (lt foo (const_int 0)), so we can perform the above
4629 simplification. */
4630
4631 if (XEXP (x, 1) == const1_rtx
4632 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4633 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4634 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
4635 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
4636#endif
4637
4638 /* (xor (comparison foo bar) (const_int sign-bit))
4639 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22
CH
4640 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4641 && (STORE_FLAG_VALUE
4642 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
230d793d
RS
4643 && XEXP (x, 1) == const_true_rtx
4644 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4645 && reversible_comparison_p (XEXP (x, 0)))
4646 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4647 mode, XEXP (XEXP (x, 0), 0),
4648 XEXP (XEXP (x, 0), 1));
4649 break;
4650
4651 case ABS:
4652 /* (abs (neg <foo>)) -> (abs <foo>) */
4653 if (GET_CODE (XEXP (x, 0)) == NEG)
4654 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4655
4656 /* If operand is something known to be positive, ignore the ABS. */
4657 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
5f4f0e22
CH
4658 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4659 <= HOST_BITS_PER_WIDE_INT)
951553af 4660 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5f4f0e22
CH
4661 & ((HOST_WIDE_INT) 1
4662 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
230d793d
RS
4663 == 0)))
4664 return XEXP (x, 0);
4665
4666
4667 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
d0ab8cd3 4668 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
230d793d
RS
4669 {
4670 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4671 goto restart;
4672 }
4673 break;
4674
a7c99304
RK
4675 case FFS:
4676 /* (ffs (*_extend <X>)) = (ffs <X>) */
4677 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4678 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4679 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4680 break;
4681
230d793d
RS
4682 case FLOAT:
4683 /* (float (sign_extend <X>)) = (float <X>). */
4684 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4685 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4686 break;
4687
4688 case LSHIFT:
4689 case ASHIFT:
4690 case LSHIFTRT:
4691 case ASHIFTRT:
4692 case ROTATE:
4693 case ROTATERT:
230d793d
RS
4694 /* If this is a shift by a constant amount, simplify it. */
4695 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4696 {
4697 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4698 INTVAL (XEXP (x, 1)));
4699 if (GET_CODE (x) != code)
4700 goto restart;
4701 }
77fa0940
RK
4702
4703#ifdef SHIFT_COUNT_TRUNCATED
5541136b 4704 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
77fa0940
RK
4705 SUBST (XEXP (x, 1),
4706 force_to_mode (XEXP (x, 1), GET_MODE (x),
6139ff20
RK
4707 ((HOST_WIDE_INT) 1
4708 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4709 - 1,
e3d616e3 4710 NULL_RTX, 0));
77fa0940
RK
4711#endif
4712
230d793d
RS
4713 break;
4714 }
4715
4716 return x;
4717}
4718\f
4719/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4720 operations" because they can be replaced with two more basic operations.
4721 ZERO_EXTEND is also considered "compound" because it can be replaced with
4722 an AND operation, which is simpler, though only one operation.
4723
4724 The function expand_compound_operation is called with an rtx expression
4725 and will convert it to the appropriate shifts and AND operations,
4726 simplifying at each stage.
4727
4728 The function make_compound_operation is called to convert an expression
4729 consisting of shifts and ANDs into the equivalent compound expression.
4730 It is the inverse of this function, loosely speaking. */
4731
4732static rtx
4733expand_compound_operation (x)
4734 rtx x;
4735{
4736 int pos = 0, len;
4737 int unsignedp = 0;
4738 int modewidth;
4739 rtx tem;
4740
4741 switch (GET_CODE (x))
4742 {
4743 case ZERO_EXTEND:
4744 unsignedp = 1;
4745 case SIGN_EXTEND:
75473182
RS
4746 /* We can't necessarily use a const_int for a multiword mode;
4747 it depends on implicitly extending the value.
4748 Since we don't know the right way to extend it,
4749 we can't tell whether the implicit way is right.
4750
4751 Even for a mode that is no wider than a const_int,
4752 we can't win, because we need to sign extend one of its bits through
4753 the rest of it, and we don't know which bit. */
230d793d 4754 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 4755 return x;
230d793d
RS
4756
4757 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4758 return x;
4759
4760 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4761 /* If the inner object has VOIDmode (the only way this can happen
4762 is if it is a ASM_OPERANDS), we can't do anything since we don't
4763 know how much masking to do. */
4764 if (len == 0)
4765 return x;
4766
4767 break;
4768
4769 case ZERO_EXTRACT:
4770 unsignedp = 1;
4771 case SIGN_EXTRACT:
4772 /* If the operand is a CLOBBER, just return it. */
4773 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4774 return XEXP (x, 0);
4775
4776 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4777 || GET_CODE (XEXP (x, 2)) != CONST_INT
4778 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4779 return x;
4780
4781 len = INTVAL (XEXP (x, 1));
4782 pos = INTVAL (XEXP (x, 2));
4783
4784 /* If this goes outside the object being extracted, replace the object
4785 with a (use (mem ...)) construct that only combine understands
4786 and is used only for this purpose. */
4787 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4788 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4789
4790#if BITS_BIG_ENDIAN
4791 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4792#endif
4793 break;
4794
4795 default:
4796 return x;
4797 }
4798
4799 /* If we reach here, we want to return a pair of shifts. The inner
4800 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4801 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4802 logical depending on the value of UNSIGNEDP.
4803
4804 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4805 converted into an AND of a shift.
4806
4807 We must check for the case where the left shift would have a negative
4808 count. This can happen in a case like (x >> 31) & 255 on machines
4809 that can't shift by a constant. On those machines, we would first
4810 combine the shift with the AND to produce a variable-position
4811 extraction. Then the constant of 31 would be substituted in to produce
4812 a such a position. */
4813
4814 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4815 if (modewidth >= pos - len)
5f4f0e22 4816 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 4817 GET_MODE (x),
5f4f0e22
CH
4818 simplify_shift_const (NULL_RTX, ASHIFT,
4819 GET_MODE (x),
230d793d
RS
4820 XEXP (x, 0),
4821 modewidth - pos - len),
4822 modewidth - len);
4823
5f4f0e22
CH
4824 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4825 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4826 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
4827 GET_MODE (x),
4828 XEXP (x, 0), pos),
5f4f0e22 4829 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
4830 else
4831 /* Any other cases we can't handle. */
4832 return x;
4833
4834
4835 /* If we couldn't do this for some reason, return the original
4836 expression. */
4837 if (GET_CODE (tem) == CLOBBER)
4838 return x;
4839
4840 return tem;
4841}
4842\f
4843/* X is a SET which contains an assignment of one object into
4844 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4845 or certain SUBREGS). If possible, convert it into a series of
4846 logical operations.
4847
4848 We half-heartedly support variable positions, but do not at all
4849 support variable lengths. */
4850
4851static rtx
4852expand_field_assignment (x)
4853 rtx x;
4854{
4855 rtx inner;
4856 rtx pos; /* Always counts from low bit. */
4857 int len;
4858 rtx mask;
4859 enum machine_mode compute_mode;
4860
4861 /* Loop until we find something we can't simplify. */
4862 while (1)
4863 {
4864 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4865 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4866 {
4867 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4868 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4869 pos = const0_rtx;
4870 }
4871 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4872 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4873 {
4874 inner = XEXP (SET_DEST (x), 0);
4875 len = INTVAL (XEXP (SET_DEST (x), 1));
4876 pos = XEXP (SET_DEST (x), 2);
4877
4878 /* If the position is constant and spans the width of INNER,
4879 surround INNER with a USE to indicate this. */
4880 if (GET_CODE (pos) == CONST_INT
4881 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4882 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4883
4884#if BITS_BIG_ENDIAN
4885 if (GET_CODE (pos) == CONST_INT)
5f4f0e22
CH
4886 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4887 - INTVAL (pos));
230d793d
RS
4888 else if (GET_CODE (pos) == MINUS
4889 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4890 && (INTVAL (XEXP (pos, 1))
4891 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4892 /* If position is ADJUST - X, new position is X. */
4893 pos = XEXP (pos, 0);
4894 else
4895 pos = gen_binary (MINUS, GET_MODE (pos),
5f4f0e22
CH
4896 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4897 - len),
4898 pos);
230d793d
RS
4899#endif
4900 }
4901
4902 /* A SUBREG between two modes that occupy the same numbers of words
4903 can be done by moving the SUBREG to the source. */
4904 else if (GET_CODE (SET_DEST (x)) == SUBREG
4905 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4906 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4907 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4908 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4909 {
4910 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4911 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4912 SET_SRC (x)));
4913 continue;
4914 }
4915 else
4916 break;
4917
4918 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4919 inner = SUBREG_REG (inner);
4920
4921 compute_mode = GET_MODE (inner);
4922
4923 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5f4f0e22
CH
4924 if (len < HOST_BITS_PER_WIDE_INT)
4925 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
4926 else
4927 break;
4928
4929 /* Now compute the equivalent expression. Make a copy of INNER
4930 for the SET_DEST in case it is a MEM into which we will substitute;
4931 we don't want shared RTL in that case. */
4932 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4933 gen_binary (IOR, compute_mode,
4934 gen_binary (AND, compute_mode,
4935 gen_unary (NOT, compute_mode,
4936 gen_binary (ASHIFT,
4937 compute_mode,
4938 mask, pos)),
4939 inner),
4940 gen_binary (ASHIFT, compute_mode,
4941 gen_binary (AND, compute_mode,
4942 gen_lowpart_for_combine
4943 (compute_mode,
4944 SET_SRC (x)),
4945 mask),
4946 pos)));
4947 }
4948
4949 return x;
4950}
4951\f
8999a12e
RK
4952/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
4953 it is an RTX that represents a variable starting position; otherwise,
4954 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
4955
4956 INNER may be a USE. This will occur when we started with a bitfield
4957 that went outside the boundary of the object in memory, which is
4958 allowed on most machines. To isolate this case, we produce a USE
4959 whose mode is wide enough and surround the MEM with it. The only
4960 code that understands the USE is this routine. If it is not removed,
4961 it will cause the resulting insn not to match.
4962
4963 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4964 signed reference.
4965
4966 IN_DEST is non-zero if this is a reference in the destination of a
4967 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4968 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4969 be used.
4970
4971 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4972 ZERO_EXTRACT should be built even for bits starting at bit 0.
4973
4974 MODE is the desired mode of the result (if IN_DEST == 0). */
4975
4976static rtx
4977make_extraction (mode, inner, pos, pos_rtx, len,
4978 unsignedp, in_dest, in_compare)
4979 enum machine_mode mode;
4980 rtx inner;
4981 int pos;
4982 rtx pos_rtx;
4983 int len;
4984 int unsignedp;
4985 int in_dest, in_compare;
4986{
94b4b17a
RS
4987 /* This mode describes the size of the storage area
4988 to fetch the overall value from. Within that, we
4989 ignore the POS lowest bits, etc. */
230d793d
RS
4990 enum machine_mode is_mode = GET_MODE (inner);
4991 enum machine_mode inner_mode;
4992 enum machine_mode wanted_mem_mode = byte_mode;
4993 enum machine_mode pos_mode = word_mode;
4994 enum machine_mode extraction_mode = word_mode;
4995 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4996 int spans_byte = 0;
4997 rtx new = 0;
8999a12e 4998 rtx orig_pos_rtx = pos_rtx;
6139ff20 4999 int orig_pos;
230d793d
RS
5000
5001 /* Get some information about INNER and get the innermost object. */
5002 if (GET_CODE (inner) == USE)
94b4b17a 5003 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
230d793d
RS
5004 /* We don't need to adjust the position because we set up the USE
5005 to pretend that it was a full-word object. */
5006 spans_byte = 1, inner = XEXP (inner, 0);
5007 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
94b4b17a
RS
5008 {
5009 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5010 consider just the QI as the memory to extract from.
5011 The subreg adds or removes high bits; its mode is
5012 irrelevant to the meaning of this extraction,
5013 since POS and LEN count from the lsb. */
5014 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5015 is_mode = GET_MODE (SUBREG_REG (inner));
5016 inner = SUBREG_REG (inner);
5017 }
230d793d
RS
5018
5019 inner_mode = GET_MODE (inner);
5020
5021 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
8999a12e 5022 pos = INTVAL (pos_rtx), pos_rtx = 0;
230d793d
RS
5023
5024 /* See if this can be done without an extraction. We never can if the
5025 width of the field is not the same as that of some integer mode. For
5026 registers, we can only avoid the extraction if the position is at the
5027 low-order bit and this is either not in the destination or we have the
5028 appropriate STRICT_LOW_PART operation available.
5029
5030 For MEM, we can avoid an extract if the field starts on an appropriate
5031 boundary and we can change the mode of the memory reference. However,
5032 we cannot directly access the MEM if we have a USE and the underlying
5033 MEM is not TMODE. This combination means that MEM was being used in a
5034 context where bits outside its mode were being referenced; that is only
5035 valid in bit-field insns. */
5036
5037 if (tmode != BLKmode
5038 && ! (spans_byte && inner_mode != tmode)
8999a12e 5039 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
230d793d 5040 && (! in_dest
df62f951
RK
5041 || (GET_CODE (inner) == REG
5042 && (movstrict_optab->handlers[(int) tmode].insn_code
5043 != CODE_FOR_nothing))))
8999a12e 5044 || (GET_CODE (inner) == MEM && pos_rtx == 0
dfbe1b2f
RK
5045 && (pos
5046 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5047 : BITS_PER_UNIT)) == 0
230d793d
RS
5048 /* We can't do this if we are widening INNER_MODE (it
5049 may not be aligned, for one thing). */
5050 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5051 && (inner_mode == tmode
5052 || (! mode_dependent_address_p (XEXP (inner, 0))
5053 && ! MEM_VOLATILE_P (inner))))))
5054 {
230d793d
RS
5055 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5056 field. If the original and current mode are the same, we need not
5057 adjust the offset. Otherwise, we do if bytes big endian.
5058
5059 If INNER is not a MEM, get a piece consisting of the just the field
df62f951 5060 of interest (in this case POS must be 0). */
230d793d
RS
5061
5062 if (GET_CODE (inner) == MEM)
5063 {
94b4b17a
RS
5064 int offset;
5065 /* POS counts from lsb, but make OFFSET count in memory order. */
5066 if (BYTES_BIG_ENDIAN)
5067 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5068 else
5069 offset = pos / BITS_PER_UNIT;
230d793d
RS
5070
5071 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5072 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5073 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5074 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5075 }
df62f951 5076 else if (GET_CODE (inner) == REG)
77fa0940
RK
5077 /* We can't call gen_lowpart_for_combine here since we always want
5078 a SUBREG and it would sometimes return a new hard register. */
5079 new = gen_rtx (SUBREG, tmode, inner,
5080 (WORDS_BIG_ENDIAN
3e3ea975
RS
5081 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5082 ? ((GET_MODE_SIZE (inner_mode) - GET_MODE_SIZE (tmode))
6ba17bb0
RS
5083 / UNITS_PER_WORD)
5084 : 0));
230d793d 5085 else
6139ff20
RK
5086 new = force_to_mode (inner, tmode,
5087 len >= HOST_BITS_PER_WIDE_INT
5088 ? GET_MODE_MASK (tmode)
5089 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 5090 NULL_RTX, 0);
230d793d
RS
5091
5092 /* If this extraction is going into the destination of a SET,
5093 make a STRICT_LOW_PART unless we made a MEM. */
5094
5095 if (in_dest)
5096 return (GET_CODE (new) == MEM ? new
77fa0940
RK
5097 : (GET_CODE (new) != SUBREG
5098 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5099 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
230d793d
RS
5100
5101 /* Otherwise, sign- or zero-extend unless we already are in the
5102 proper mode. */
5103
5104 return (mode == tmode ? new
5105 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5106 mode, new));
5107 }
5108
cc471082
RS
5109 /* Unless this is a COMPARE or we have a funny memory reference,
5110 don't do anything with zero-extending field extracts starting at
5111 the low-order bit since they are simple AND operations. */
8999a12e
RK
5112 if (pos_rtx == 0 && pos == 0 && ! in_dest
5113 && ! in_compare && ! spans_byte && unsignedp)
230d793d
RS
5114 return 0;
5115
5116 /* Get the mode to use should INNER be a MEM, the mode for the position,
5117 and the mode for the result. */
5118#ifdef HAVE_insv
5119 if (in_dest)
5120 {
5121 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5122 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5123 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5124 }
5125#endif
5126
5127#ifdef HAVE_extzv
5128 if (! in_dest && unsignedp)
5129 {
5130 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5131 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5132 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5133 }
5134#endif
5135
5136#ifdef HAVE_extv
5137 if (! in_dest && ! unsignedp)
5138 {
5139 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5140 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5141 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5142 }
5143#endif
5144
5145 /* Never narrow an object, since that might not be safe. */
5146
5147 if (mode != VOIDmode
5148 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5149 extraction_mode = mode;
5150
5151 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5152 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5153 pos_mode = GET_MODE (pos_rtx);
5154
5155 /* If this is not from memory or we have to change the mode of memory and
5156 cannot, the desired mode is EXTRACTION_MODE. */
5157 if (GET_CODE (inner) != MEM
5158 || (inner_mode != wanted_mem_mode
5159 && (mode_dependent_address_p (XEXP (inner, 0))
5160 || MEM_VOLATILE_P (inner))))
5161 wanted_mem_mode = extraction_mode;
5162
6139ff20
RK
5163 orig_pos = pos;
5164
230d793d
RS
5165#if BITS_BIG_ENDIAN
5166 /* If position is constant, compute new position. Otherwise, build
5167 subtraction. */
8999a12e 5168 if (pos_rtx == 0)
230d793d
RS
5169 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
5170 - len - pos);
5171 else
5172 pos_rtx
5173 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5f4f0e22
CH
5174 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
5175 GET_MODE_BITSIZE (wanted_mem_mode))
5176 - len),
5177 pos_rtx);
230d793d
RS
5178#endif
5179
5180 /* If INNER has a wider mode, make it smaller. If this is a constant
5181 extract, try to adjust the byte to point to the byte containing
5182 the value. */
5183 if (wanted_mem_mode != VOIDmode
5184 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
5185 && ((GET_CODE (inner) == MEM
5186 && (inner_mode == wanted_mem_mode
5187 || (! mode_dependent_address_p (XEXP (inner, 0))
5188 && ! MEM_VOLATILE_P (inner))))))
5189 {
5190 int offset = 0;
5191
5192 /* The computations below will be correct if the machine is big
5193 endian in both bits and bytes or little endian in bits and bytes.
5194 If it is mixed, we must adjust. */
5195
230d793d
RS
5196 /* If bytes are big endian and we had a paradoxical SUBREG, we must
5197 adjust OFFSET to compensate. */
5198#if BYTES_BIG_ENDIAN
5199 if (! spans_byte
5200 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5201 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5202#endif
5203
5204 /* If this is a constant position, we can move to the desired byte. */
8999a12e 5205 if (pos_rtx == 0)
230d793d
RS
5206 {
5207 offset += pos / BITS_PER_UNIT;
5208 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
5209 }
5210
c6b3f1f2
JW
5211#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5212 if (! spans_byte && is_mode != wanted_mem_mode)
5213 offset = (GET_MODE_SIZE (is_mode)
5214 - GET_MODE_SIZE (wanted_mem_mode) - offset);
5215#endif
5216
230d793d
RS
5217 if (offset != 0 || inner_mode != wanted_mem_mode)
5218 {
5219 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
5220 plus_constant (XEXP (inner, 0), offset));
5221 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5222 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5223 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5224 inner = newmem;
5225 }
5226 }
5227
5228 /* If INNER is not memory, we can always get it into the proper mode. */
5229 else if (GET_CODE (inner) != MEM)
d0ab8cd3 5230 inner = force_to_mode (inner, extraction_mode,
6139ff20
RK
5231 pos_rtx || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5232 ? GET_MODE_MASK (extraction_mode)
5233 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
e3d616e3 5234 NULL_RTX, 0);
230d793d
RS
5235
5236 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5237 have to zero extend. Otherwise, we can just use a SUBREG. */
8999a12e 5238 if (pos_rtx != 0
230d793d
RS
5239 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5240 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
8999a12e 5241 else if (pos_rtx != 0
230d793d
RS
5242 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5243 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5244
8999a12e
RK
5245 /* Make POS_RTX unless we already have it and it is correct. If we don't
5246 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5247 be a CONST_INT. */
5248 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5249 pos_rtx = orig_pos_rtx;
5250
5251 else if (pos_rtx == 0)
5f4f0e22 5252 pos_rtx = GEN_INT (pos);
230d793d
RS
5253
5254 /* Make the required operation. See if we can use existing rtx. */
5255 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5f4f0e22 5256 extraction_mode, inner, GEN_INT (len), pos_rtx);
230d793d
RS
5257 if (! in_dest)
5258 new = gen_lowpart_for_combine (mode, new);
5259
5260 return new;
5261}
5262\f
5263/* Look at the expression rooted at X. Look for expressions
5264 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5265 Form these expressions.
5266
5267 Return the new rtx, usually just X.
5268
5269 Also, for machines like the Vax that don't have logical shift insns,
5270 try to convert logical to arithmetic shift operations in cases where
5271 they are equivalent. This undoes the canonicalizations to logical
5272 shifts done elsewhere.
5273
5274 We try, as much as possible, to re-use rtl expressions to save memory.
5275
5276 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
5277 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
5278 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
5279 or a COMPARE against zero, it is COMPARE. */
5280
5281static rtx
5282make_compound_operation (x, in_code)
5283 rtx x;
5284 enum rtx_code in_code;
5285{
5286 enum rtx_code code = GET_CODE (x);
5287 enum machine_mode mode = GET_MODE (x);
5288 int mode_width = GET_MODE_BITSIZE (mode);
5289 enum rtx_code next_code;
d0ab8cd3 5290 int i, count;
230d793d 5291 rtx new = 0;
280f58ba 5292 rtx tem;
230d793d
RS
5293 char *fmt;
5294
5295 /* Select the code to be used in recursive calls. Once we are inside an
5296 address, we stay there. If we have a comparison, set to COMPARE,
5297 but once inside, go back to our default of SET. */
5298
42495ca0 5299 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
230d793d
RS
5300 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5301 && XEXP (x, 1) == const0_rtx) ? COMPARE
5302 : in_code == COMPARE ? SET : in_code);
5303
5304 /* Process depending on the code of this operation. If NEW is set
5305 non-zero, it will be returned. */
5306
5307 switch (code)
5308 {
5309 case ASHIFT:
5310 case LSHIFT:
5311 /* Convert shifts by constants into multiplications if inside
5312 an address. */
5313 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 5314 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 5315 && INTVAL (XEXP (x, 1)) >= 0)
280f58ba
RK
5316 {
5317 new = make_compound_operation (XEXP (x, 0), next_code);
5318 new = gen_rtx_combine (MULT, mode, new,
5319 GEN_INT ((HOST_WIDE_INT) 1
5320 << INTVAL (XEXP (x, 1))));
5321 }
230d793d
RS
5322 break;
5323
5324 case AND:
5325 /* If the second operand is not a constant, we can't do anything
5326 with it. */
5327 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5328 break;
5329
5330 /* If the constant is a power of two minus one and the first operand
5331 is a logical right shift, make an extraction. */
5332 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5333 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5334 {
5335 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5336 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5337 0, in_code == COMPARE);
5338 }
dfbe1b2f 5339
230d793d
RS
5340 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5341 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5342 && subreg_lowpart_p (XEXP (x, 0))
5343 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5344 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5345 {
5346 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5347 next_code);
5348 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5349 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5350 0, in_code == COMPARE);
5351 }
c2f9f64e
JW
5352 /* Same as previous, but for (xor/ior (lshift...) (lshift...)). */
5353 else if ((GET_CODE (XEXP (x, 0)) == XOR
5354 || GET_CODE (XEXP (x, 0)) == IOR)
5355 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5356 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5357 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5358 {
5359 /* Apply the distributive law, and then try to make extractions. */
5360 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5361 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5362 XEXP (x, 1)),
5363 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5364 XEXP (x, 1)));
5365 new = make_compound_operation (new, in_code);
5366 }
a7c99304
RK
5367
5368 /* If we are have (and (rotate X C) M) and C is larger than the number
5369 of bits in M, this is an extraction. */
5370
5371 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5372 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5373 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5374 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
5375 {
5376 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5377 new = make_extraction (mode, new,
5378 (GET_MODE_BITSIZE (mode)
5379 - INTVAL (XEXP (XEXP (x, 0), 1))),
5380 NULL_RTX, i, 1, 0, in_code == COMPARE);
5381 }
a7c99304
RK
5382
5383 /* On machines without logical shifts, if the operand of the AND is
230d793d
RS
5384 a logical shift and our mask turns off all the propagated sign
5385 bits, we can replace the logical shift with an arithmetic shift. */
d0ab8cd3
RK
5386 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5387 && (lshr_optab->handlers[(int) mode].insn_code
5388 == CODE_FOR_nothing)
230d793d
RS
5389 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5390 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5391 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5f4f0e22
CH
5392 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5393 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 5394 {
5f4f0e22 5395 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
5396
5397 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5398 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5399 SUBST (XEXP (x, 0),
280f58ba
RK
5400 gen_rtx_combine (ASHIFTRT, mode,
5401 make_compound_operation (XEXP (XEXP (x, 0), 0),
5402 next_code),
230d793d
RS
5403 XEXP (XEXP (x, 0), 1)));
5404 }
5405
5406 /* If the constant is one less than a power of two, this might be
5407 representable by an extraction even if no shift is present.
5408 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5409 we are in a COMPARE. */
5410 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5411 new = make_extraction (mode,
5412 make_compound_operation (XEXP (x, 0),
5413 next_code),
5414 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
230d793d
RS
5415
5416 /* If we are in a comparison and this is an AND with a power of two,
5417 convert this into the appropriate bit extract. */
5418 else if (in_code == COMPARE
5419 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
280f58ba
RK
5420 new = make_extraction (mode,
5421 make_compound_operation (XEXP (x, 0),
5422 next_code),
5423 i, NULL_RTX, 1, 1, 0, 1);
230d793d
RS
5424
5425 break;
5426
5427 case LSHIFTRT:
5428 /* If the sign bit is known to be zero, replace this with an
5429 arithmetic shift. */
d0ab8cd3
RK
5430 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5431 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5f4f0e22 5432 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 5433 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
230d793d 5434 {
280f58ba
RK
5435 new = gen_rtx_combine (ASHIFTRT, mode,
5436 make_compound_operation (XEXP (x, 0),
5437 next_code),
5438 XEXP (x, 1));
230d793d
RS
5439 break;
5440 }
5441
5442 /* ... fall through ... */
5443
5444 case ASHIFTRT:
5445 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5446 this is a SIGN_EXTRACT. */
5447 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5448 && GET_CODE (XEXP (x, 0)) == ASHIFT
5449 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5450 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
5451 {
5452 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5453 new = make_extraction (mode, new,
5454 (INTVAL (XEXP (x, 1))
5455 - INTVAL (XEXP (XEXP (x, 0), 1))),
5456 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5457 code == LSHIFTRT, 0, in_code == COMPARE);
5458 }
d0ab8cd3
RK
5459
5460 /* Similarly if we have (ashifrt (OP (ashift foo C1) C3) C2). In these
5461 cases, we are better off returning a SIGN_EXTEND of the operation. */
5462
5463 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5464 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
5465 || GET_CODE (XEXP (x, 0)) == XOR
5466 || GET_CODE (XEXP (x, 0)) == PLUS)
5467 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5468 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
d0ab8cd3
RK
5469 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
5470 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
0a5cbff6
RK
5471 && 0 == (INTVAL (XEXP (XEXP (x, 0), 1))
5472 & (((HOST_WIDE_INT) 1
5473 << (MIN (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)),
5474 INTVAL (XEXP (x, 1)))
5475 - 1)))))
d0ab8cd3 5476 {
0a5cbff6
RK
5477 rtx c1 = XEXP (XEXP (XEXP (x, 0), 0), 1);
5478 rtx c2 = XEXP (x, 1);
5479 rtx c3 = XEXP (XEXP (x, 0), 1);
5480 HOST_WIDE_INT newop1;
5481 rtx inner = XEXP (XEXP (XEXP (x, 0), 0), 0);
5482
5483 /* If C1 > C2, INNER needs to have the shift performed on it
5484 for C1-C2 bits. */
5485 if (INTVAL (c1) > INTVAL (c2))
5486 {
5487 inner = gen_binary (ASHIFT, mode, inner,
5488 GEN_INT (INTVAL (c1) - INTVAL (c2)));
5489 c1 = c2;
5490 }
d0ab8cd3 5491
0a5cbff6
RK
5492 newop1 = INTVAL (c3) >> INTVAL (c1);
5493 new = make_compound_operation (inner,
5494 GET_CODE (XEXP (x, 0)) == PLUS
5495 ? MEM : GET_CODE (XEXP (x, 0)));
d0ab8cd3 5496 new = make_extraction (mode,
280f58ba 5497 gen_binary (GET_CODE (XEXP (x, 0)), mode, new,
d0ab8cd3 5498 GEN_INT (newop1)),
0a5cbff6 5499 INTVAL (c2) - INTVAL (c1),
239db5fc 5500 NULL_RTX, mode_width - INTVAL (c2),
d0ab8cd3
RK
5501 code == LSHIFTRT, 0, in_code == COMPARE);
5502 }
5503
d0dcc580
RK
5504 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
5505 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5506 && GET_CODE (XEXP (x, 0)) == NEG
5507 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5508 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5509 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
280f58ba
RK
5510 {
5511 new = make_compound_operation (XEXP (XEXP (XEXP (x, 0), 0), 0),
5512 next_code);
5513 new = make_extraction (mode,
fe2db4fb 5514 gen_unary (GET_CODE (XEXP (x, 0)), mode, new),
280f58ba
RK
5515 (INTVAL (XEXP (x, 1))
5516 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5517 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5518 code == LSHIFTRT, 0, in_code == COMPARE);
5519 }
230d793d 5520 break;
280f58ba
RK
5521
5522 case SUBREG:
5523 /* Call ourselves recursively on the inner expression. If we are
5524 narrowing the object and it has a different RTL code from
5525 what it originally did, do this SUBREG as a force_to_mode. */
5526
0a5cbff6 5527 tem = make_compound_operation (SUBREG_REG (x), in_code);
280f58ba
RK
5528 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5529 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5530 && subreg_lowpart_p (x))
0a5cbff6
RK
5531 {
5532 rtx newer = force_to_mode (tem, mode,
e3d616e3 5533 GET_MODE_MASK (mode), NULL_RTX, 0);
0a5cbff6
RK
5534
5535 /* If we have something other than a SUBREG, we might have
5536 done an expansion, so rerun outselves. */
5537 if (GET_CODE (newer) != SUBREG)
5538 newer = make_compound_operation (newer, in_code);
5539
5540 return newer;
5541 }
230d793d
RS
5542 }
5543
5544 if (new)
5545 {
df62f951 5546 x = gen_lowpart_for_combine (mode, new);
230d793d
RS
5547 code = GET_CODE (x);
5548 }
5549
5550 /* Now recursively process each operand of this operation. */
5551 fmt = GET_RTX_FORMAT (code);
5552 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5553 if (fmt[i] == 'e')
5554 {
5555 new = make_compound_operation (XEXP (x, i), next_code);
5556 SUBST (XEXP (x, i), new);
5557 }
5558
5559 return x;
5560}
5561\f
5562/* Given M see if it is a value that would select a field of bits
5563 within an item, but not the entire word. Return -1 if not.
5564 Otherwise, return the starting position of the field, where 0 is the
5565 low-order bit.
5566
5567 *PLEN is set to the length of the field. */
5568
5569static int
5570get_pos_from_mask (m, plen)
5f4f0e22 5571 unsigned HOST_WIDE_INT m;
230d793d
RS
5572 int *plen;
5573{
5574 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5575 int pos = exact_log2 (m & - m);
5576
5577 if (pos < 0)
5578 return -1;
5579
5580 /* Now shift off the low-order zero bits and see if we have a power of
5581 two minus 1. */
5582 *plen = exact_log2 ((m >> pos) + 1);
5583
5584 if (*plen <= 0)
5585 return -1;
5586
5587 return pos;
5588}
5589\f
6139ff20
RK
5590/* See if X can be simplified knowing that we will only refer to it in
5591 MODE and will only refer to those bits that are nonzero in MASK.
5592 If other bits are being computed or if masking operations are done
5593 that select a superset of the bits in MASK, they can sometimes be
5594 ignored.
5595
5596 Return a possibly simplified expression, but always convert X to
5597 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
dfbe1b2f
RK
5598
5599 Also, if REG is non-zero and X is a register equal in value to REG,
e3d616e3
RK
5600 replace X with REG.
5601
5602 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
5603 are all off in X. This is used when X will be complemented, by either
5604 NOT or XOR. */
dfbe1b2f
RK
5605
5606static rtx
e3d616e3 5607force_to_mode (x, mode, mask, reg, just_select)
dfbe1b2f
RK
5608 rtx x;
5609 enum machine_mode mode;
6139ff20 5610 unsigned HOST_WIDE_INT mask;
dfbe1b2f 5611 rtx reg;
e3d616e3 5612 int just_select;
dfbe1b2f
RK
5613{
5614 enum rtx_code code = GET_CODE (x);
e3d616e3 5615 int next_select = just_select || code == XOR || code == NOT;
ef026f91
RS
5616 enum machine_mode op_mode;
5617 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6139ff20
RK
5618 rtx op0, op1, temp;
5619
5620 /* We want to perform the operation is its present mode unless we know
5621 that the operation is valid in MODE, in which case we do the operation
5622 in MODE. */
ef026f91
RS
5623 op_mode = ((code_to_optab[(int) code] != 0
5624 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
5625 != CODE_FOR_nothing))
5626 ? mode : GET_MODE (x));
e3d616e3 5627
aa988991
RS
5628 /* It is not valid to do a right-shift in a narrower mode
5629 than the one it came in with. */
5630 if ((code == LSHIFTRT || code == ASHIFTRT)
5631 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
5632 op_mode = GET_MODE (x);
ef026f91
RS
5633
5634 /* Truncate MASK to fit OP_MODE. */
5635 if (op_mode)
5636 mask &= GET_MODE_MASK (op_mode);
6139ff20
RK
5637
5638 /* When we have an arithmetic operation, or a shift whose count we
5639 do not know, we need to assume that all bit the up to the highest-order
5640 bit in MASK will be needed. This is how we form such a mask. */
ef026f91
RS
5641 if (op_mode)
5642 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
5643 ? GET_MODE_MASK (op_mode)
5644 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
5645 else
5646 fuller_mask = ~ (HOST_WIDE_INT) 0;
5647
5648 /* Determine what bits of X are guaranteed to be (non)zero. */
5649 nonzero = nonzero_bits (x, mode);
6139ff20
RK
5650
5651 /* If none of the bits in X are needed, return a zero. */
e3d616e3 5652 if (! just_select && (nonzero & mask) == 0)
6139ff20 5653 return const0_rtx;
dfbe1b2f 5654
6139ff20
RK
5655 /* If X is a CONST_INT, return a new one. Do this here since the
5656 test below will fail. */
5657 if (GET_CODE (x) == CONST_INT)
ceb7983c
RK
5658 {
5659 HOST_WIDE_INT cval = INTVAL (x) & mask;
5660 int width = GET_MODE_BITSIZE (mode);
5661
5662 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
5663 number, sign extend it. */
5664 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5665 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5666 cval |= (HOST_WIDE_INT) -1 << width;
5667
5668 return GEN_INT (cval);
5669 }
dfbe1b2f 5670
6139ff20
RK
5671 /* If X is narrower than MODE, just get X in the proper mode. */
5672 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
dfbe1b2f
RK
5673 return gen_lowpart_for_combine (mode, x);
5674
6139ff20
RK
5675 /* If we aren't changing the mode and all zero bits in MASK are already
5676 known to be zero in X, we need not do anything. */
5677 if (GET_MODE (x) == mode && (~ mask & nonzero) == 0)
5678 return x;
5679
dfbe1b2f
RK
5680 switch (code)
5681 {
6139ff20
RK
5682 case CLOBBER:
5683 /* If X is a (clobber (const_int)), return it since we know we are
5684 generating something that won't match. */
5685 return x;
5686
5687#if ! BITS_BIG_ENDIAN
5688 case USE:
5689 /* X is a (use (mem ..)) that was made from a bit-field extraction that
5690 spanned the boundary of the MEM. If we are now masking so it is
5691 within that boundary, we don't need the USE any more. */
5692 if ((mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
e3d616e3 5693 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20
RK
5694#endif
5695
dfbe1b2f
RK
5696 case SIGN_EXTEND:
5697 case ZERO_EXTEND:
5698 case ZERO_EXTRACT:
5699 case SIGN_EXTRACT:
5700 x = expand_compound_operation (x);
5701 if (GET_CODE (x) != code)
e3d616e3 5702 return force_to_mode (x, mode, mask, reg, next_select);
dfbe1b2f
RK
5703 break;
5704
5705 case REG:
5706 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5707 || rtx_equal_p (reg, get_last_value (x))))
5708 x = reg;
5709 break;
5710
dfbe1b2f 5711 case SUBREG:
6139ff20
RK
5712 if (subreg_lowpart_p (x)
5713 /* We can ignore the effect this SUBREG if it narrows the mode or,
8baf60bb
RK
5714 on machines where register operations are performed on the full
5715 word, if the constant masks to zero all the bits the mode
5716 doesn't have. */
6139ff20
RK
5717 && ((GET_MODE_SIZE (GET_MODE (x))
5718 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8baf60bb 5719#ifdef WORD_REGISTER_OPERATIONS
6139ff20
RK
5720 || (0 == (mask
5721 & GET_MODE_MASK (GET_MODE (x))
5722 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))
5723#endif
5724 ))
e3d616e3 5725 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
dfbe1b2f
RK
5726 break;
5727
5728 case AND:
6139ff20
RK
5729 /* If this is an AND with a constant, convert it into an AND
5730 whose constant is the AND of that constant with MASK. If it
5731 remains an AND of MASK, delete it since it is redundant. */
dfbe1b2f 5732
6139ff20
RK
5733 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5734 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
dfbe1b2f 5735 {
6139ff20
RK
5736 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
5737 mask & INTVAL (XEXP (x, 1)));
dfbe1b2f
RK
5738
5739 /* If X is still an AND, see if it is an AND with a mask that
5740 is just some low-order bits. If so, and it is BITS wide (it
5741 can't be wider), we don't need it. */
5742
5743 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 5744 && INTVAL (XEXP (x, 1)) == mask)
dfbe1b2f 5745 x = XEXP (x, 0);
d0ab8cd3
RK
5746
5747 break;
dfbe1b2f
RK
5748 }
5749
6139ff20 5750 goto binop;
dfbe1b2f
RK
5751
5752 case PLUS:
6139ff20
RK
5753 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5754 low-order bits (as in an alignment operation) and FOO is already
5755 aligned to that boundary, mask C1 to that boundary as well.
5756 This may eliminate that PLUS and, later, the AND. */
5757 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5758 && exact_log2 (- mask) >= 0
5759 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
5760 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
5761 return force_to_mode (plus_constant (XEXP (x, 0),
5762 INTVAL (XEXP (x, 1)) & mask),
e3d616e3 5763 mode, mask, reg, next_select);
6139ff20
RK
5764
5765 /* ... fall through ... */
5766
dfbe1b2f
RK
5767 case MINUS:
5768 case MULT:
6139ff20
RK
5769 /* For PLUS, MINUS and MULT, we need any bits less significant than the
5770 most significant bit in MASK since carries from those bits will
5771 affect the bits we are interested in. */
5772 mask = fuller_mask;
5773 goto binop;
5774
dfbe1b2f
RK
5775 case IOR:
5776 case XOR:
6139ff20
RK
5777 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5778 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5779 operation which may be a bitfield extraction. Ensure that the
5780 constant we form is not wider than the mode of X. */
5781
5782 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5783 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5784 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5785 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5786 && GET_CODE (XEXP (x, 1)) == CONST_INT
5787 && ((INTVAL (XEXP (XEXP (x, 0), 1))
5788 + floor_log2 (INTVAL (XEXP (x, 1))))
5789 < GET_MODE_BITSIZE (GET_MODE (x)))
5790 && (INTVAL (XEXP (x, 1))
5791 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x)) == 0))
5792 {
5793 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
5794 << INTVAL (XEXP (XEXP (x, 0), 1)));
5795 temp = gen_binary (GET_CODE (x), GET_MODE (x),
5796 XEXP (XEXP (x, 0), 0), temp);
5797 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (x, 1));
e3d616e3 5798 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
5799 }
5800
5801 binop:
dfbe1b2f 5802 /* For most binary operations, just propagate into the operation and
6139ff20
RK
5803 change the mode if we have an operation of that mode. */
5804
e3d616e3
RK
5805 op0 = gen_lowpart_for_combine (op_mode,
5806 force_to_mode (XEXP (x, 0), mode, mask,
5807 reg, next_select));
5808 op1 = gen_lowpart_for_combine (op_mode,
5809 force_to_mode (XEXP (x, 1), mode, mask,
5810 reg, next_select));
6139ff20 5811
2dd484ed
RK
5812 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
5813 MASK since OP1 might have been sign-extended but we never want
5814 to turn on extra bits, since combine might have previously relied
5815 on them being off. */
5816 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
5817 && (INTVAL (op1) & mask) != 0)
5818 op1 = GEN_INT (INTVAL (op1) & mask);
5819
6139ff20
RK
5820 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
5821 x = gen_binary (code, op_mode, op0, op1);
d0ab8cd3 5822 break;
dfbe1b2f
RK
5823
5824 case ASHIFT:
5825 case LSHIFT:
5826 /* For left shifts, do the same, but just for the first operand.
f6785026
RK
5827 However, we cannot do anything with shifts where we cannot
5828 guarantee that the counts are smaller than the size of the mode
5829 because such a count will have a different meaning in a
6139ff20 5830 wider mode. */
f6785026
RK
5831
5832 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 5833 && INTVAL (XEXP (x, 1)) >= 0
f6785026
RK
5834 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
5835 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
5836 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
adb7a1cb 5837 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
f6785026
RK
5838 break;
5839
6139ff20
RK
5840 /* If the shift count is a constant and we can do arithmetic in
5841 the mode of the shift, refine which bits we need. Otherwise, use the
5842 conservative form of the mask. */
5843 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5844 && INTVAL (XEXP (x, 1)) >= 0
5845 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
5846 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5847 mask >>= INTVAL (XEXP (x, 1));
5848 else
5849 mask = fuller_mask;
5850
5851 op0 = gen_lowpart_for_combine (op_mode,
5852 force_to_mode (XEXP (x, 0), op_mode,
e3d616e3 5853 mask, reg, next_select));
6139ff20
RK
5854
5855 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
5856 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
d0ab8cd3 5857 break;
dfbe1b2f
RK
5858
5859 case LSHIFTRT:
1347292b
JW
5860 /* Here we can only do something if the shift count is a constant,
5861 this shift constant is valid for the host, and we can do arithmetic
5862 in OP_MODE. */
dfbe1b2f
RK
5863
5864 if (GET_CODE (XEXP (x, 1)) == CONST_INT
1347292b 5865 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6139ff20 5866 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 5867 {
6139ff20
RK
5868 rtx inner = XEXP (x, 0);
5869
5870 /* Select the mask of the bits we need for the shift operand. */
5871 mask <<= INTVAL (XEXP (x, 1));
d0ab8cd3 5872
6139ff20
RK
5873 /* We can only change the mode of the shift if we can do arithmetic
5874 in the mode of the shift and MASK is no wider than the width of
5875 OP_MODE. */
5876 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
5877 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
d0ab8cd3
RK
5878 op_mode = GET_MODE (x);
5879
e3d616e3 5880 inner = force_to_mode (inner, op_mode, mask, reg, next_select);
6139ff20
RK
5881
5882 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
5883 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
d0ab8cd3 5884 }
6139ff20
RK
5885
5886 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
5887 shift and AND produces only copies of the sign bit (C2 is one less
5888 than a power of two), we can do this with just a shift. */
5889
5890 if (GET_CODE (x) == LSHIFTRT
5891 && GET_CODE (XEXP (x, 1)) == CONST_INT
5892 && ((INTVAL (XEXP (x, 1))
5893 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
5894 >= GET_MODE_BITSIZE (GET_MODE (x)))
5895 && exact_log2 (mask + 1) >= 0
5896 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5897 >= exact_log2 (mask + 1)))
5898 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5899 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
5900 - exact_log2 (mask + 1)));
d0ab8cd3
RK
5901 break;
5902
5903 case ASHIFTRT:
6139ff20
RK
5904 /* If we are just looking for the sign bit, we don't need this shift at
5905 all, even if it has a variable count. */
5906 if (mask == ((HOST_WIDE_INT) 1
5907 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))
e3d616e3 5908 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20
RK
5909
5910 /* If this is a shift by a constant, get a mask that contains those bits
5911 that are not copies of the sign bit. We then have two cases: If
5912 MASK only includes those bits, this can be a logical shift, which may
5913 allow simplifications. If MASK is a single-bit field not within
5914 those bits, we are requesting a copy of the sign bit and hence can
5915 shift the sign bit to the appropriate location. */
5916
5917 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
5918 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5919 {
5920 int i = -1;
5921
5922 nonzero = GET_MODE_MASK (GET_MODE (x));
5923 nonzero >>= INTVAL (XEXP (x, 1));
5924
5925 if ((mask & ~ nonzero) == 0
5926 || (i = exact_log2 (mask)) >= 0)
5927 {
5928 x = simplify_shift_const
5929 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5930 i < 0 ? INTVAL (XEXP (x, 1))
5931 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
5932
5933 if (GET_CODE (x) != ASHIFTRT)
e3d616e3 5934 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
5935 }
5936 }
5937
5938 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
5939 even if the shift count isn't a constant. */
5940 if (mask == 1)
5941 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
5942
d0ab8cd3 5943 /* If this is a sign-extension operation that just affects bits
4c002f29
RK
5944 we don't care about, remove it. Be sure the call above returned
5945 something that is still a shift. */
d0ab8cd3 5946
4c002f29
RK
5947 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
5948 && GET_CODE (XEXP (x, 1)) == CONST_INT
d0ab8cd3 5949 && INTVAL (XEXP (x, 1)) >= 0
6139ff20
RK
5950 && (INTVAL (XEXP (x, 1))
5951 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
d0ab8cd3
RK
5952 && GET_CODE (XEXP (x, 0)) == ASHIFT
5953 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5954 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
e3d616e3
RK
5955 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
5956 reg, next_select);
6139ff20 5957
dfbe1b2f
RK
5958 break;
5959
6139ff20
RK
5960 case ROTATE:
5961 case ROTATERT:
5962 /* If the shift count is constant and we can do computations
5963 in the mode of X, compute where the bits we care about are.
5964 Otherwise, we can't do anything. Don't change the mode of
5965 the shift or propagate MODE into the shift, though. */
5966 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5967 && INTVAL (XEXP (x, 1)) >= 0)
5968 {
5969 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
5970 GET_MODE (x), GEN_INT (mask),
5971 XEXP (x, 1));
5972 if (temp)
5973 SUBST (XEXP (x, 0),
5974 force_to_mode (XEXP (x, 0), GET_MODE (x),
e3d616e3 5975 INTVAL (temp), reg, next_select));
6139ff20
RK
5976 }
5977 break;
5978
dfbe1b2f 5979 case NEG:
6139ff20
RK
5980 /* We need any bits less significant than the most significant bit in
5981 MASK since carries from those bits will affect the bits we are
5982 interested in. */
5983 mask = fuller_mask;
5984 goto unop;
5985
dfbe1b2f 5986 case NOT:
6139ff20
RK
5987 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
5988 same as the XOR case above. Ensure that the constant we form is not
5989 wider than the mode of X. */
5990
5991 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5992 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5993 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5994 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
5995 < GET_MODE_BITSIZE (GET_MODE (x)))
5996 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5997 {
5998 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
5999 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6000 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6001
e3d616e3 6002 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6003 }
6004
6005 unop:
e3d616e3
RK
6006 op0 = gen_lowpart_for_combine (op_mode,
6007 force_to_mode (XEXP (x, 0), mode, mask,
6008 reg, next_select));
6139ff20
RK
6009 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6010 x = gen_unary (code, op_mode, op0);
6011 break;
6012
6013 case NE:
6014 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
6015 in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not
6016 in CONST. */
6017 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 0) == const0_rtx
6018 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0)
e3d616e3 6019 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20 6020
d0ab8cd3
RK
6021 break;
6022
6023 case IF_THEN_ELSE:
6024 /* We have no way of knowing if the IF_THEN_ELSE can itself be
6025 written in a narrower mode. We play it safe and do not do so. */
6026
6027 SUBST (XEXP (x, 1),
6028 gen_lowpart_for_combine (GET_MODE (x),
6029 force_to_mode (XEXP (x, 1), mode,
e3d616e3 6030 mask, reg, next_select)));
d0ab8cd3
RK
6031 SUBST (XEXP (x, 2),
6032 gen_lowpart_for_combine (GET_MODE (x),
6033 force_to_mode (XEXP (x, 2), mode,
e3d616e3 6034 mask, reg,next_select)));
d0ab8cd3 6035 break;
dfbe1b2f
RK
6036 }
6037
d0ab8cd3 6038 /* Ensure we return a value of the proper mode. */
dfbe1b2f
RK
6039 return gen_lowpart_for_combine (mode, x);
6040}
6041\f
abe6e52f
RK
6042/* Return nonzero if X is an expression that has one of two values depending on
6043 whether some other value is zero or nonzero. In that case, we return the
6044 value that is being tested, *PTRUE is set to the value if the rtx being
6045 returned has a nonzero value, and *PFALSE is set to the other alternative.
6046
6047 If we return zero, we set *PTRUE and *PFALSE to X. */
6048
6049static rtx
6050if_then_else_cond (x, ptrue, pfalse)
6051 rtx x;
6052 rtx *ptrue, *pfalse;
6053{
6054 enum machine_mode mode = GET_MODE (x);
6055 enum rtx_code code = GET_CODE (x);
6056 int size = GET_MODE_BITSIZE (mode);
6057 rtx cond0, cond1, true0, true1, false0, false1;
6058 unsigned HOST_WIDE_INT nz;
6059
6060 /* If this is a unary operation whose operand has one of two values, apply
6061 our opcode to compute those values. */
6062 if (GET_RTX_CLASS (code) == '1'
6063 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6064 {
6065 *ptrue = gen_unary (code, mode, true0);
6066 *pfalse = gen_unary (code, mode, false0);
6067 return cond0;
6068 }
6069
6070 /* If this is a binary operation, see if either side has only one of two
6071 values. If either one does or if both do and they are conditional on
6072 the same value, compute the new true and false values. */
6073 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6074 || GET_RTX_CLASS (code) == '<')
6075 {
6076 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6077 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6078
6079 if ((cond0 != 0 || cond1 != 0)
6080 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6081 {
6082 *ptrue = gen_binary (code, mode, true0, true1);
6083 *pfalse = gen_binary (code, mode, false0, false1);
6084 return cond0 ? cond0 : cond1;
6085 }
6086 }
6087
6088 else if (code == IF_THEN_ELSE)
6089 {
6090 /* If we have IF_THEN_ELSE already, extract the condition and
6091 canonicalize it if it is NE or EQ. */
6092 cond0 = XEXP (x, 0);
6093 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6094 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6095 return XEXP (cond0, 0);
6096 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6097 {
6098 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6099 return XEXP (cond0, 0);
6100 }
6101 else
6102 return cond0;
6103 }
6104
6105 /* If X is a normal SUBREG with both inner and outer modes integral,
6106 we can narrow both the true and false values of the inner expression,
6107 if there is a condition. */
6108 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6109 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6110 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6111 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6112 &true0, &false0)))
6113 {
6114 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6115 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (inner_mode);
6116
6117 *ptrue = force_to_mode (true0, inner_mode, mask, NULL_RTX, 0);
6118 *pfalse = force_to_mode (false0, inner_mode, mask, NULL_RTX, 0);
6119 return cond0;
6120 }
6121
6122 /* If X is a constant, this isn't special and will cause confusions
6123 if we treat it as such. Likewise if it is equivalent to a constant. */
6124 else if (CONSTANT_P (x)
6125 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6126 ;
6127
6128 /* If X is known to be either 0 or -1, those are the true and
6129 false values when testing X. */
6130 else if (num_sign_bit_copies (x, mode) == size)
6131 {
6132 *ptrue = constm1_rtx, *pfalse = const0_rtx;
6133 return x;
6134 }
6135
6136 /* Likewise for 0 or a single bit. */
6137 else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6138 {
6139 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6140 return x;
6141 }
6142
6143 /* Otherwise fail; show no condition with true and false values the same. */
6144 *ptrue = *pfalse = x;
6145 return 0;
6146}
6147\f
1a26b032
RK
6148/* Return the value of expression X given the fact that condition COND
6149 is known to be true when applied to REG as its first operand and VAL
6150 as its second. X is known to not be shared and so can be modified in
6151 place.
6152
6153 We only handle the simplest cases, and specifically those cases that
6154 arise with IF_THEN_ELSE expressions. */
6155
6156static rtx
6157known_cond (x, cond, reg, val)
6158 rtx x;
6159 enum rtx_code cond;
6160 rtx reg, val;
6161{
6162 enum rtx_code code = GET_CODE (x);
6163 rtx new, temp;
6164 char *fmt;
6165 int i, j;
6166
6167 if (side_effects_p (x))
6168 return x;
6169
6170 if (cond == EQ && rtx_equal_p (x, reg))
6171 return val;
6172
6173 /* If X is (abs REG) and we know something about REG's relationship
6174 with zero, we may be able to simplify this. */
6175
6176 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6177 switch (cond)
6178 {
6179 case GE: case GT: case EQ:
6180 return XEXP (x, 0);
6181 case LT: case LE:
6182 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
6183 }
6184
6185 /* The only other cases we handle are MIN, MAX, and comparisons if the
6186 operands are the same as REG and VAL. */
6187
6188 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6189 {
6190 if (rtx_equal_p (XEXP (x, 0), val))
6191 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6192
6193 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6194 {
6195 if (GET_RTX_CLASS (code) == '<')
6196 return (comparison_dominates_p (cond, code) ? const_true_rtx
6197 : (comparison_dominates_p (cond,
6198 reverse_condition (code))
6199 ? const0_rtx : x));
6200
6201 else if (code == SMAX || code == SMIN
6202 || code == UMIN || code == UMAX)
6203 {
6204 int unsignedp = (code == UMIN || code == UMAX);
6205
6206 if (code == SMAX || code == UMAX)
6207 cond = reverse_condition (cond);
6208
6209 switch (cond)
6210 {
6211 case GE: case GT:
6212 return unsignedp ? x : XEXP (x, 1);
6213 case LE: case LT:
6214 return unsignedp ? x : XEXP (x, 0);
6215 case GEU: case GTU:
6216 return unsignedp ? XEXP (x, 1) : x;
6217 case LEU: case LTU:
6218 return unsignedp ? XEXP (x, 0) : x;
6219 }
6220 }
6221 }
6222 }
6223
6224 fmt = GET_RTX_FORMAT (code);
6225 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6226 {
6227 if (fmt[i] == 'e')
6228 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6229 else if (fmt[i] == 'E')
6230 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6231 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6232 cond, reg, val));
6233 }
6234
6235 return x;
6236}
6237\f
230d793d
RS
6238/* See if X, a SET operation, can be rewritten as a bit-field assignment.
6239 Return that assignment if so.
6240
6241 We only handle the most common cases. */
6242
6243static rtx
6244make_field_assignment (x)
6245 rtx x;
6246{
6247 rtx dest = SET_DEST (x);
6248 rtx src = SET_SRC (x);
dfbe1b2f
RK
6249 rtx ourdest;
6250 rtx assign;
5f4f0e22
CH
6251 HOST_WIDE_INT c1;
6252 int pos, len;
dfbe1b2f
RK
6253 rtx other;
6254 enum machine_mode mode;
230d793d
RS
6255
6256 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6257 a clear of a one-bit field. We will have changed it to
6258 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6259 for a SUBREG. */
6260
6261 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6262 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6263 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
dfbe1b2f
RK
6264 && (rtx_equal_p (dest, XEXP (src, 1))
6265 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6266 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 6267 {
8999a12e 6268 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 6269 1, 1, 1, 0);
dfbe1b2f 6270 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
230d793d
RS
6271 }
6272
6273 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6274 && subreg_lowpart_p (XEXP (src, 0))
6275 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6276 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6277 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6278 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
dfbe1b2f
RK
6279 && (rtx_equal_p (dest, XEXP (src, 1))
6280 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6281 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 6282 {
8999a12e 6283 assign = make_extraction (VOIDmode, dest, 0,
230d793d
RS
6284 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6285 1, 1, 1, 0);
dfbe1b2f 6286 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
230d793d
RS
6287 }
6288
6289 /* If SRC is (ior (ashift (const_int 1) POS DEST)), this is a set of a
6290 one-bit field. */
6291 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6292 && XEXP (XEXP (src, 0), 0) == const1_rtx
dfbe1b2f
RK
6293 && (rtx_equal_p (dest, XEXP (src, 1))
6294 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6295 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 6296 {
8999a12e 6297 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 6298 1, 1, 1, 0);
dfbe1b2f 6299 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
230d793d
RS
6300 }
6301
dfbe1b2f
RK
6302 /* The other case we handle is assignments into a constant-position
6303 field. They look like (ior (and DEST C1) OTHER). If C1 represents
6304 a mask that has all one bits except for a group of zero bits and
6305 OTHER is known to have zeros where C1 has ones, this is such an
6306 assignment. Compute the position and length from C1. Shift OTHER
6307 to the appropriate position, force it to the required mode, and
6308 make the extraction. Check for the AND in both operands. */
6309
6310 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
6311 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
6312 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
6313 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
6314 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
6315 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
6316 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
6317 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
6318 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
6319 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
6320 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
6321 dest)))
6322 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
6323 else
6324 return x;
230d793d 6325
c2f9f64e 6326 pos = get_pos_from_mask (c1 ^ GET_MODE_MASK (GET_MODE (dest)), &len);
dfbe1b2f 6327 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
ac49a949 6328 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
951553af 6329 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
dfbe1b2f 6330 return x;
230d793d 6331
5f4f0e22 6332 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
230d793d 6333
dfbe1b2f
RK
6334 /* The mode to use for the source is the mode of the assignment, or of
6335 what is inside a possible STRICT_LOW_PART. */
6336 mode = (GET_CODE (assign) == STRICT_LOW_PART
6337 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 6338
dfbe1b2f
RK
6339 /* Shift OTHER right POS places and make it the source, restricting it
6340 to the proper length and mode. */
230d793d 6341
5f4f0e22
CH
6342 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
6343 GET_MODE (src), other, pos),
6139ff20
RK
6344 mode,
6345 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
6346 ? GET_MODE_MASK (mode)
6347 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 6348 dest, 0);
230d793d 6349
dfbe1b2f 6350 return gen_rtx_combine (SET, VOIDmode, assign, src);
230d793d
RS
6351}
6352\f
6353/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
6354 if so. */
6355
6356static rtx
6357apply_distributive_law (x)
6358 rtx x;
6359{
6360 enum rtx_code code = GET_CODE (x);
6361 rtx lhs, rhs, other;
6362 rtx tem;
6363 enum rtx_code inner_code;
6364
d8a8a4da
RS
6365 /* Distributivity is not true for floating point.
6366 It can change the value. So don't do it.
6367 -- rms and moshier@world.std.com. */
3ad2180a 6368 if (FLOAT_MODE_P (GET_MODE (x)))
d8a8a4da
RS
6369 return x;
6370
230d793d
RS
6371 /* The outer operation can only be one of the following: */
6372 if (code != IOR && code != AND && code != XOR
6373 && code != PLUS && code != MINUS)
6374 return x;
6375
6376 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
6377
dfbe1b2f 6378 /* If either operand is a primitive we can't do anything, so get out fast. */
230d793d 6379 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
dfbe1b2f 6380 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
230d793d
RS
6381 return x;
6382
6383 lhs = expand_compound_operation (lhs);
6384 rhs = expand_compound_operation (rhs);
6385 inner_code = GET_CODE (lhs);
6386 if (inner_code != GET_CODE (rhs))
6387 return x;
6388
6389 /* See if the inner and outer operations distribute. */
6390 switch (inner_code)
6391 {
6392 case LSHIFTRT:
6393 case ASHIFTRT:
6394 case AND:
6395 case IOR:
6396 /* These all distribute except over PLUS. */
6397 if (code == PLUS || code == MINUS)
6398 return x;
6399 break;
6400
6401 case MULT:
6402 if (code != PLUS && code != MINUS)
6403 return x;
6404 break;
6405
6406 case ASHIFT:
6407 case LSHIFT:
6408 /* These are also multiplies, so they distribute over everything. */
6409 break;
6410
6411 case SUBREG:
dfbe1b2f
RK
6412 /* Non-paradoxical SUBREGs distributes over all operations, provided
6413 the inner modes and word numbers are the same, this is an extraction
2b4bd1bc
JW
6414 of a low-order part, we don't convert an fp operation to int or
6415 vice versa, and we would not be converting a single-word
dfbe1b2f 6416 operation into a multi-word operation. The latter test is not
2b4bd1bc 6417 required, but it prevents generating unneeded multi-word operations.
dfbe1b2f
RK
6418 Some of the previous tests are redundant given the latter test, but
6419 are retained because they are required for correctness.
6420
6421 We produce the result slightly differently in this case. */
6422
6423 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
6424 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
6425 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
6426 || (GET_MODE_CLASS (GET_MODE (lhs))
6427 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f
RK
6428 || (GET_MODE_SIZE (GET_MODE (lhs))
6429 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
6430 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
230d793d
RS
6431 return x;
6432
6433 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
6434 SUBREG_REG (lhs), SUBREG_REG (rhs));
6435 return gen_lowpart_for_combine (GET_MODE (x), tem);
6436
6437 default:
6438 return x;
6439 }
6440
6441 /* Set LHS and RHS to the inner operands (A and B in the example
6442 above) and set OTHER to the common operand (C in the example).
6443 These is only one way to do this unless the inner operation is
6444 commutative. */
6445 if (GET_RTX_CLASS (inner_code) == 'c'
6446 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
6447 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
6448 else if (GET_RTX_CLASS (inner_code) == 'c'
6449 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
6450 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
6451 else if (GET_RTX_CLASS (inner_code) == 'c'
6452 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
6453 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
6454 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
6455 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
6456 else
6457 return x;
6458
6459 /* Form the new inner operation, seeing if it simplifies first. */
6460 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
6461
6462 /* There is one exception to the general way of distributing:
6463 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
6464 if (code == XOR && inner_code == IOR)
6465 {
6466 inner_code = AND;
6467 other = gen_unary (NOT, GET_MODE (x), other);
6468 }
6469
6470 /* We may be able to continuing distributing the result, so call
6471 ourselves recursively on the inner operation before forming the
6472 outer operation, which we return. */
6473 return gen_binary (inner_code, GET_MODE (x),
6474 apply_distributive_law (tem), other);
6475}
6476\f
6477/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
6478 in MODE.
6479
6480 Return an equivalent form, if different from X. Otherwise, return X. If
6481 X is zero, we are to always construct the equivalent form. */
6482
6483static rtx
6484simplify_and_const_int (x, mode, varop, constop)
6485 rtx x;
6486 enum machine_mode mode;
6487 rtx varop;
5f4f0e22 6488 unsigned HOST_WIDE_INT constop;
230d793d
RS
6489{
6490 register enum machine_mode tmode;
6491 register rtx temp;
951553af 6492 unsigned HOST_WIDE_INT nonzero;
42301240 6493 int i;
230d793d 6494
6139ff20
RK
6495 /* Simplify VAROP knowing that we will be only looking at some of the
6496 bits in it. */
e3d616e3 6497 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
230d793d 6498
6139ff20
RK
6499 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
6500 CONST_INT, we are done. */
6501 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
6502 return varop;
230d793d 6503
fc06d7aa
RK
6504 /* See what bits may be nonzero in VAROP. Unlike the general case of
6505 a call to nonzero_bits, here we don't care about bits outside
6506 MODE. */
6507
6508 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
230d793d
RS
6509
6510 /* Turn off all bits in the constant that are known to already be zero.
951553af 6511 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
230d793d
RS
6512 which is tested below. */
6513
951553af 6514 constop &= nonzero;
230d793d
RS
6515
6516 /* If we don't have any bits left, return zero. */
6517 if (constop == 0)
6518 return const0_rtx;
6519
42301240
RK
6520 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
6521 a power of two, we can replace this with a ASHIFT. */
6522 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
6523 && (i = exact_log2 (constop)) >= 0)
6524 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
6525
6139ff20
RK
6526 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
6527 or XOR, then try to apply the distributive law. This may eliminate
6528 operations if either branch can be simplified because of the AND.
6529 It may also make some cases more complex, but those cases probably
6530 won't match a pattern either with or without this. */
6531
6532 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
6533 return
6534 gen_lowpart_for_combine
6535 (mode,
6536 apply_distributive_law
6537 (gen_binary (GET_CODE (varop), GET_MODE (varop),
6538 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6539 XEXP (varop, 0), constop),
6540 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6541 XEXP (varop, 1), constop))));
6542
230d793d
RS
6543 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6544 if we already had one (just check for the simplest cases). */
6545 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6546 && GET_MODE (XEXP (x, 0)) == mode
6547 && SUBREG_REG (XEXP (x, 0)) == varop)
6548 varop = XEXP (x, 0);
6549 else
6550 varop = gen_lowpart_for_combine (mode, varop);
6551
6552 /* If we can't make the SUBREG, try to return what we were given. */
6553 if (GET_CODE (varop) == CLOBBER)
6554 return x ? x : varop;
6555
6556 /* If we are only masking insignificant bits, return VAROP. */
951553af 6557 if (constop == nonzero)
230d793d
RS
6558 x = varop;
6559
6560 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6561 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6139ff20 6562 x = gen_binary (AND, mode, varop, GEN_INT (constop));
230d793d
RS
6563
6564 else
6565 {
6566 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6567 || INTVAL (XEXP (x, 1)) != constop)
5f4f0e22 6568 SUBST (XEXP (x, 1), GEN_INT (constop));
230d793d
RS
6569
6570 SUBST (XEXP (x, 0), varop);
6571 }
6572
6573 return x;
6574}
6575\f
6576/* Given an expression, X, compute which bits in X can be non-zero.
6577 We don't care about bits outside of those defined in MODE.
6578
6579 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
6580 a shift, AND, or zero_extract, we can do better. */
6581
5f4f0e22 6582static unsigned HOST_WIDE_INT
951553af 6583nonzero_bits (x, mode)
230d793d
RS
6584 rtx x;
6585 enum machine_mode mode;
6586{
951553af
RK
6587 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6588 unsigned HOST_WIDE_INT inner_nz;
230d793d
RS
6589 enum rtx_code code;
6590 int mode_width = GET_MODE_BITSIZE (mode);
6591 rtx tem;
6592
6593 /* If X is wider than MODE, use its mode instead. */
6594 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6595 {
6596 mode = GET_MODE (x);
951553af 6597 nonzero = GET_MODE_MASK (mode);
230d793d
RS
6598 mode_width = GET_MODE_BITSIZE (mode);
6599 }
6600
5f4f0e22 6601 if (mode_width > HOST_BITS_PER_WIDE_INT)
230d793d
RS
6602 /* Our only callers in this case look for single bit values. So
6603 just return the mode mask. Those tests will then be false. */
951553af 6604 return nonzero;
230d793d 6605
8baf60bb 6606#ifndef WORD_REGISTER_OPERATIONS
c6965c0f 6607 /* If MODE is wider than X, but both are a single word for both the host
0840fd91
RK
6608 and target machines, we can compute this from which bits of the
6609 object might be nonzero in its own mode, taking into account the fact
6610 that on many CISC machines, accessing an object in a wider mode
6611 causes the high-order bits to become undefined. So they are
6612 not known to be zero. */
6613
6614 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
6615 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
6616 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
c6965c0f 6617 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
0840fd91
RK
6618 {
6619 nonzero &= nonzero_bits (x, GET_MODE (x));
6620 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
6621 return nonzero;
6622 }
6623#endif
6624
230d793d
RS
6625 code = GET_CODE (x);
6626 switch (code)
6627 {
6628 case REG:
6629#ifdef STACK_BOUNDARY
6630 /* If this is the stack pointer, we may know something about its
6631 alignment. If PUSH_ROUNDING is defined, it is possible for the
6632 stack to be momentarily aligned only to that amount, so we pick
6633 the least alignment. */
6634
6635 if (x == stack_pointer_rtx)
6636 {
6637 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6638
6639#ifdef PUSH_ROUNDING
6640 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6641#endif
6642
951553af 6643 return nonzero & ~ (sp_alignment - 1);
230d793d
RS
6644 }
6645#endif
6646
55310dad
RK
6647 /* If X is a register whose nonzero bits value is current, use it.
6648 Otherwise, if X is a register whose value we can find, use that
6649 value. Otherwise, use the previously-computed global nonzero bits
6650 for this register. */
6651
6652 if (reg_last_set_value[REGNO (x)] != 0
6653 && reg_last_set_mode[REGNO (x)] == mode
6654 && (reg_n_sets[REGNO (x)] == 1
6655 || reg_last_set_label[REGNO (x)] == label_tick)
6656 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6657 return reg_last_set_nonzero_bits[REGNO (x)];
230d793d
RS
6658
6659 tem = get_last_value (x);
9afa3d54 6660
230d793d 6661 if (tem)
9afa3d54
RK
6662 {
6663#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6664 /* If X is narrower than MODE and TEM is a non-negative
6665 constant that would appear negative in the mode of X,
6666 sign-extend it for use in reg_nonzero_bits because some
6667 machines (maybe most) will actually do the sign-extension
6668 and this is the conservative approach.
6669
6670 ??? For 2.5, try to tighten up the MD files in this regard
6671 instead of this kludge. */
6672
6673 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
6674 && GET_CODE (tem) == CONST_INT
6675 && INTVAL (tem) > 0
6676 && 0 != (INTVAL (tem)
6677 & ((HOST_WIDE_INT) 1
9e69be8c 6678 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
6679 tem = GEN_INT (INTVAL (tem)
6680 | ((HOST_WIDE_INT) (-1)
6681 << GET_MODE_BITSIZE (GET_MODE (x))));
6682#endif
6683 return nonzero_bits (tem, mode);
6684 }
951553af
RK
6685 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6686 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 6687 else
951553af 6688 return nonzero;
230d793d
RS
6689
6690 case CONST_INT:
9afa3d54
RK
6691#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6692 /* If X is negative in MODE, sign-extend the value. */
9e69be8c
RK
6693 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
6694 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
6695 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
9afa3d54
RK
6696#endif
6697
230d793d
RS
6698 return INTVAL (x);
6699
230d793d 6700 case MEM:
8baf60bb 6701#ifdef LOAD_EXTEND_OP
230d793d
RS
6702 /* In many, if not most, RISC machines, reading a byte from memory
6703 zeros the rest of the register. Noticing that fact saves a lot
6704 of extra zero-extends. */
8baf60bb
RK
6705 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
6706 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d 6707#endif
8baf60bb 6708 break;
230d793d 6709
230d793d
RS
6710 case EQ: case NE:
6711 case GT: case GTU:
6712 case LT: case LTU:
6713 case GE: case GEU:
6714 case LE: case LEU:
3f508eca 6715
c6965c0f
RK
6716 /* If this produces an integer result, we know which bits are set.
6717 Code here used to clear bits outside the mode of X, but that is
6718 now done above. */
230d793d 6719
c6965c0f
RK
6720 if (GET_MODE_CLASS (mode) == MODE_INT
6721 && mode_width <= HOST_BITS_PER_WIDE_INT)
6722 nonzero = STORE_FLAG_VALUE;
230d793d 6723 break;
230d793d 6724
230d793d 6725 case NEG:
d0ab8cd3
RK
6726 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6727 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 6728 nonzero = 1;
230d793d
RS
6729
6730 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
951553af 6731 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
230d793d 6732 break;
d0ab8cd3
RK
6733
6734 case ABS:
6735 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6736 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 6737 nonzero = 1;
d0ab8cd3 6738 break;
230d793d
RS
6739
6740 case TRUNCATE:
951553af 6741 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
230d793d
RS
6742 break;
6743
6744 case ZERO_EXTEND:
951553af 6745 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 6746 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 6747 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
230d793d
RS
6748 break;
6749
6750 case SIGN_EXTEND:
6751 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6752 Otherwise, show all the bits in the outer mode but not the inner
6753 may be non-zero. */
951553af 6754 inner_nz = nonzero_bits (XEXP (x, 0), mode);
230d793d
RS
6755 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6756 {
951553af
RK
6757 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6758 if (inner_nz &
5f4f0e22
CH
6759 (((HOST_WIDE_INT) 1
6760 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 6761 inner_nz |= (GET_MODE_MASK (mode)
230d793d
RS
6762 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6763 }
6764
951553af 6765 nonzero &= inner_nz;
230d793d
RS
6766 break;
6767
6768 case AND:
951553af
RK
6769 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6770 & nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
6771 break;
6772
d0ab8cd3
RK
6773 case XOR: case IOR:
6774 case UMIN: case UMAX: case SMIN: case SMAX:
951553af
RK
6775 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6776 | nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
6777 break;
6778
6779 case PLUS: case MINUS:
6780 case MULT:
6781 case DIV: case UDIV:
6782 case MOD: case UMOD:
6783 /* We can apply the rules of arithmetic to compute the number of
6784 high- and low-order zero bits of these operations. We start by
6785 computing the width (position of the highest-order non-zero bit)
6786 and the number of low-order zero bits for each value. */
6787 {
951553af
RK
6788 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6789 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6790 int width0 = floor_log2 (nz0) + 1;
6791 int width1 = floor_log2 (nz1) + 1;
6792 int low0 = floor_log2 (nz0 & -nz0);
6793 int low1 = floor_log2 (nz1 & -nz1);
6794 int op0_maybe_minusp = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6795 int op1_maybe_minusp = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
230d793d
RS
6796 int result_width = mode_width;
6797 int result_low = 0;
6798
6799 switch (code)
6800 {
6801 case PLUS:
6802 result_width = MAX (width0, width1) + 1;
6803 result_low = MIN (low0, low1);
6804 break;
6805 case MINUS:
6806 result_low = MIN (low0, low1);
6807 break;
6808 case MULT:
6809 result_width = width0 + width1;
6810 result_low = low0 + low1;
6811 break;
6812 case DIV:
6813 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6814 result_width = width0;
6815 break;
6816 case UDIV:
6817 result_width = width0;
6818 break;
6819 case MOD:
6820 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6821 result_width = MIN (width0, width1);
6822 result_low = MIN (low0, low1);
6823 break;
6824 case UMOD:
6825 result_width = MIN (width0, width1);
6826 result_low = MIN (low0, low1);
6827 break;
6828 }
6829
6830 if (result_width < mode_width)
951553af 6831 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
230d793d
RS
6832
6833 if (result_low > 0)
951553af 6834 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
230d793d
RS
6835 }
6836 break;
6837
6838 case ZERO_EXTRACT:
6839 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 6840 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 6841 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
230d793d
RS
6842 break;
6843
6844 case SUBREG:
c3c2cb37
RK
6845 /* If this is a SUBREG formed for a promoted variable that has
6846 been zero-extended, we know that at least the high-order bits
6847 are zero, though others might be too. */
6848
6849 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
951553af
RK
6850 nonzero = (GET_MODE_MASK (GET_MODE (x))
6851 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 6852
230d793d
RS
6853 /* If the inner mode is a single word for both the host and target
6854 machines, we can compute this from which bits of the inner
951553af 6855 object might be nonzero. */
230d793d 6856 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5f4f0e22
CH
6857 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6858 <= HOST_BITS_PER_WIDE_INT))
230d793d 6859 {
951553af 6860 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8baf60bb
RK
6861
6862#ifndef WORD_REGISTER_OPERATIONS
230d793d
RS
6863 /* On many CISC machines, accessing an object in a wider mode
6864 causes the high-order bits to become undefined. So they are
6865 not known to be zero. */
6866 if (GET_MODE_SIZE (GET_MODE (x))
6867 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
951553af
RK
6868 nonzero |= (GET_MODE_MASK (GET_MODE (x))
6869 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
230d793d
RS
6870#endif
6871 }
6872 break;
6873
6874 case ASHIFTRT:
6875 case LSHIFTRT:
6876 case ASHIFT:
6877 case LSHIFT:
6878 case ROTATE:
951553af 6879 /* The nonzero bits are in two classes: any bits within MODE
230d793d 6880 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 6881 nonzero bits are those that are significant in the operand of
230d793d
RS
6882 the shift when shifted the appropriate number of bits. This
6883 shows that high-order bits are cleared by the right shift and
6884 low-order bits by left shifts. */
6885 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6886 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 6887 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
6888 {
6889 enum machine_mode inner_mode = GET_MODE (x);
6890 int width = GET_MODE_BITSIZE (inner_mode);
6891 int count = INTVAL (XEXP (x, 1));
5f4f0e22 6892 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
951553af
RK
6893 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
6894 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 6895 unsigned HOST_WIDE_INT outer = 0;
230d793d
RS
6896
6897 if (mode_width > width)
951553af 6898 outer = (op_nonzero & nonzero & ~ mode_mask);
230d793d
RS
6899
6900 if (code == LSHIFTRT)
6901 inner >>= count;
6902 else if (code == ASHIFTRT)
6903 {
6904 inner >>= count;
6905
951553af 6906 /* If the sign bit may have been nonzero before the shift, we
230d793d 6907 need to mark all the places it could have been copied to
951553af 6908 by the shift as possibly nonzero. */
5f4f0e22
CH
6909 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6910 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d
RS
6911 }
6912 else if (code == LSHIFT || code == ASHIFT)
6913 inner <<= count;
6914 else
6915 inner = ((inner << (count % width)
6916 | (inner >> (width - (count % width)))) & mode_mask);
6917
951553af 6918 nonzero &= (outer | inner);
230d793d
RS
6919 }
6920 break;
6921
6922 case FFS:
6923 /* This is at most the number of bits in the mode. */
951553af 6924 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 6925 break;
d0ab8cd3
RK
6926
6927 case IF_THEN_ELSE:
951553af
RK
6928 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
6929 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 6930 break;
230d793d
RS
6931 }
6932
951553af 6933 return nonzero;
230d793d
RS
6934}
6935\f
d0ab8cd3 6936/* Return the number of bits at the high-order end of X that are known to
5109d49f
RK
6937 be equal to the sign bit. X will be used in mode MODE; if MODE is
6938 VOIDmode, X will be used in its own mode. The returned value will always
6939 be between 1 and the number of bits in MODE. */
d0ab8cd3
RK
6940
6941static int
6942num_sign_bit_copies (x, mode)
6943 rtx x;
6944 enum machine_mode mode;
6945{
6946 enum rtx_code code = GET_CODE (x);
6947 int bitwidth;
6948 int num0, num1, result;
951553af 6949 unsigned HOST_WIDE_INT nonzero;
d0ab8cd3
RK
6950 rtx tem;
6951
6952 /* If we weren't given a mode, use the mode of X. If the mode is still
6953 VOIDmode, we don't know anything. */
6954
6955 if (mode == VOIDmode)
6956 mode = GET_MODE (x);
6957
6958 if (mode == VOIDmode)
6752e8d2 6959 return 1;
d0ab8cd3
RK
6960
6961 bitwidth = GET_MODE_BITSIZE (mode);
6962
312def2e
RK
6963 /* For a smaller object, just ignore the high bits. */
6964 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
6965 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
6966 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
6967
0c314d1a
RK
6968#ifndef WORD_REGISTER_OPERATIONS
6969 /* If this machine does not do all register operations on the entire
6970 register and MODE is wider than the mode of X, we can say nothing
6971 at all about the high-order bits. */
6972 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
6973 return 1;
6974#endif
6975
d0ab8cd3
RK
6976 switch (code)
6977 {
6978 case REG:
55310dad
RK
6979
6980 if (reg_last_set_value[REGNO (x)] != 0
6981 && reg_last_set_mode[REGNO (x)] == mode
6982 && (reg_n_sets[REGNO (x)] == 1
6983 || reg_last_set_label[REGNO (x)] == label_tick)
6984 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6985 return reg_last_set_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
6986
6987 tem = get_last_value (x);
6988 if (tem != 0)
6989 return num_sign_bit_copies (tem, mode);
55310dad
RK
6990
6991 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6992 return reg_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
6993 break;
6994
457816e2 6995 case MEM:
8baf60bb 6996#ifdef LOAD_EXTEND_OP
457816e2 6997 /* Some RISC machines sign-extend all loads of smaller than a word. */
8baf60bb
RK
6998 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
6999 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
457816e2 7000#endif
8baf60bb 7001 break;
457816e2 7002
d0ab8cd3
RK
7003 case CONST_INT:
7004 /* If the constant is negative, take its 1's complement and remask.
7005 Then see how many zero bits we have. */
951553af 7006 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 7007 if (bitwidth <= HOST_BITS_PER_WIDE_INT
951553af
RK
7008 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7009 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 7010
951553af 7011 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7012
7013 case SUBREG:
c3c2cb37
RK
7014 /* If this is a SUBREG for a promoted object that is sign-extended
7015 and we are looking at it in a wider mode, we know that at least the
7016 high-order bits are known to be sign bit copies. */
7017
7018 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
dc3e17ad
RK
7019 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7020 num_sign_bit_copies (SUBREG_REG (x), mode));
c3c2cb37 7021
d0ab8cd3
RK
7022 /* For a smaller object, just ignore the high bits. */
7023 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7024 {
7025 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7026 return MAX (1, (num0
7027 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7028 - bitwidth)));
7029 }
457816e2 7030
8baf60bb
RK
7031#ifdef WORD_REGISTER_OPERATIONS
7032 /* For paradoxical SUBREGs on machines where all register operations
7033 affect the entire register, just look inside. Note that we are
7034 passing MODE to the recursive call, so the number of sign bit copies
7035 will remain relative to that mode, not the inner mode. */
457816e2
RK
7036
7037 if (GET_MODE_SIZE (GET_MODE (x))
7038 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7039 return num_sign_bit_copies (SUBREG_REG (x), mode);
7040#endif
d0ab8cd3
RK
7041 break;
7042
7043 case SIGN_EXTRACT:
7044 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7045 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7046 break;
7047
7048 case SIGN_EXTEND:
7049 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7050 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7051
7052 case TRUNCATE:
7053 /* For a smaller object, just ignore the high bits. */
7054 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7055 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7056 - bitwidth)));
7057
7058 case NOT:
7059 return num_sign_bit_copies (XEXP (x, 0), mode);
7060
7061 case ROTATE: case ROTATERT:
7062 /* If we are rotating left by a number of bits less than the number
7063 of sign bit copies, we can just subtract that amount from the
7064 number. */
7065 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7066 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7067 {
7068 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7069 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7070 : bitwidth - INTVAL (XEXP (x, 1))));
7071 }
7072 break;
7073
7074 case NEG:
7075 /* In general, this subtracts one sign bit copy. But if the value
7076 is known to be positive, the number of sign bit copies is the
951553af
RK
7077 same as that of the input. Finally, if the input has just one bit
7078 that might be nonzero, all the bits are copies of the sign bit. */
7079 nonzero = nonzero_bits (XEXP (x, 0), mode);
7080 if (nonzero == 1)
d0ab8cd3
RK
7081 return bitwidth;
7082
7083 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7084 if (num0 > 1
ac49a949 7085 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7086 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d0ab8cd3
RK
7087 num0--;
7088
7089 return num0;
7090
7091 case IOR: case AND: case XOR:
7092 case SMIN: case SMAX: case UMIN: case UMAX:
7093 /* Logical operations will preserve the number of sign-bit copies.
7094 MIN and MAX operations always return one of the operands. */
7095 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7096 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7097 return MIN (num0, num1);
7098
7099 case PLUS: case MINUS:
7100 /* For addition and subtraction, we can have a 1-bit carry. However,
7101 if we are subtracting 1 from a positive number, there will not
7102 be such a carry. Furthermore, if the positive number is known to
7103 be 0 or 1, we know the result is either -1 or 0. */
7104
3e3ea975 7105 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 7106 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 7107 {
951553af
RK
7108 nonzero = nonzero_bits (XEXP (x, 0), mode);
7109 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7110 return (nonzero == 1 || nonzero == 0 ? bitwidth
7111 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7112 }
7113
7114 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7115 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7116 return MAX (1, MIN (num0, num1) - 1);
7117
7118 case MULT:
7119 /* The number of bits of the product is the sum of the number of
7120 bits of both terms. However, unless one of the terms if known
7121 to be positive, we must allow for an additional bit since negating
7122 a negative number can remove one sign bit copy. */
7123
7124 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7125 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7126
7127 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7128 if (result > 0
9295e6af 7129 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7130 && ((nonzero_bits (XEXP (x, 0), mode)
d0ab8cd3 7131 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
951553af 7132 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
7133 & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) != 0))
7134 result--;
7135
7136 return MAX (1, result);
7137
7138 case UDIV:
7139 /* The result must be <= the first operand. */
7140 return num_sign_bit_copies (XEXP (x, 0), mode);
7141
7142 case UMOD:
7143 /* The result must be <= the scond operand. */
7144 return num_sign_bit_copies (XEXP (x, 1), mode);
7145
7146 case DIV:
7147 /* Similar to unsigned division, except that we have to worry about
7148 the case where the divisor is negative, in which case we have
7149 to add 1. */
7150 result = num_sign_bit_copies (XEXP (x, 0), mode);
7151 if (result > 1
ac49a949 7152 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7153 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
7154 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7155 result --;
7156
7157 return result;
7158
7159 case MOD:
7160 result = num_sign_bit_copies (XEXP (x, 1), mode);
7161 if (result > 1
ac49a949 7162 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7163 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
7164 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7165 result --;
7166
7167 return result;
7168
7169 case ASHIFTRT:
7170 /* Shifts by a constant add to the number of bits equal to the
7171 sign bit. */
7172 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7173 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7174 && INTVAL (XEXP (x, 1)) > 0)
7175 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
7176
7177 return num0;
7178
7179 case ASHIFT:
7180 case LSHIFT:
7181 /* Left shifts destroy copies. */
7182 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7183 || INTVAL (XEXP (x, 1)) < 0
7184 || INTVAL (XEXP (x, 1)) >= bitwidth)
7185 return 1;
7186
7187 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7188 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
7189
7190 case IF_THEN_ELSE:
7191 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
7192 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
7193 return MIN (num0, num1);
7194
7195#if STORE_FLAG_VALUE == -1
7196 case EQ: case NE: case GE: case GT: case LE: case LT:
7197 case GEU: case GTU: case LEU: case LTU:
7198 return bitwidth;
7199#endif
7200 }
7201
7202 /* If we haven't been able to figure it out by one of the above rules,
7203 see if some of the high-order bits are known to be zero. If so,
ac49a949
RS
7204 count those bits and return one less than that amount. If we can't
7205 safely compute the mask for this mode, always return BITWIDTH. */
7206
7207 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 7208 return 1;
d0ab8cd3 7209
951553af 7210 nonzero = nonzero_bits (x, mode);
df6f4086 7211 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
951553af 7212 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7213}
7214\f
1a26b032
RK
7215/* Return the number of "extended" bits there are in X, when interpreted
7216 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7217 unsigned quantities, this is the number of high-order zero bits.
7218 For signed quantities, this is the number of copies of the sign bit
7219 minus 1. In both case, this function returns the number of "spare"
7220 bits. For example, if two quantities for which this function returns
7221 at least 1 are added, the addition is known not to overflow.
7222
7223 This function will always return 0 unless called during combine, which
7224 implies that it must be called from a define_split. */
7225
7226int
7227extended_count (x, mode, unsignedp)
7228 rtx x;
7229 enum machine_mode mode;
7230 int unsignedp;
7231{
951553af 7232 if (nonzero_sign_valid == 0)
1a26b032
RK
7233 return 0;
7234
7235 return (unsignedp
ac49a949
RS
7236 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7237 && (GET_MODE_BITSIZE (mode) - 1
951553af 7238 - floor_log2 (nonzero_bits (x, mode))))
1a26b032
RK
7239 : num_sign_bit_copies (x, mode) - 1);
7240}
7241\f
230d793d
RS
7242/* This function is called from `simplify_shift_const' to merge two
7243 outer operations. Specifically, we have already found that we need
7244 to perform operation *POP0 with constant *PCONST0 at the outermost
7245 position. We would now like to also perform OP1 with constant CONST1
7246 (with *POP0 being done last).
7247
7248 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7249 the resulting operation. *PCOMP_P is set to 1 if we would need to
7250 complement the innermost operand, otherwise it is unchanged.
7251
7252 MODE is the mode in which the operation will be done. No bits outside
7253 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 7254 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
7255
7256 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
7257 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
7258 result is simply *PCONST0.
7259
7260 If the resulting operation cannot be expressed as one operation, we
7261 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
7262
7263static int
7264merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
7265 enum rtx_code *pop0;
5f4f0e22 7266 HOST_WIDE_INT *pconst0;
230d793d 7267 enum rtx_code op1;
5f4f0e22 7268 HOST_WIDE_INT const1;
230d793d
RS
7269 enum machine_mode mode;
7270 int *pcomp_p;
7271{
7272 enum rtx_code op0 = *pop0;
5f4f0e22 7273 HOST_WIDE_INT const0 = *pconst0;
230d793d
RS
7274
7275 const0 &= GET_MODE_MASK (mode);
7276 const1 &= GET_MODE_MASK (mode);
7277
7278 /* If OP0 is an AND, clear unimportant bits in CONST1. */
7279 if (op0 == AND)
7280 const1 &= const0;
7281
7282 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
7283 if OP0 is SET. */
7284
7285 if (op1 == NIL || op0 == SET)
7286 return 1;
7287
7288 else if (op0 == NIL)
7289 op0 = op1, const0 = const1;
7290
7291 else if (op0 == op1)
7292 {
7293 switch (op0)
7294 {
7295 case AND:
7296 const0 &= const1;
7297 break;
7298 case IOR:
7299 const0 |= const1;
7300 break;
7301 case XOR:
7302 const0 ^= const1;
7303 break;
7304 case PLUS:
7305 const0 += const1;
7306 break;
7307 case NEG:
7308 op0 = NIL;
7309 break;
7310 }
7311 }
7312
7313 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
7314 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7315 return 0;
7316
7317 /* If the two constants aren't the same, we can't do anything. The
7318 remaining six cases can all be done. */
7319 else if (const0 != const1)
7320 return 0;
7321
7322 else
7323 switch (op0)
7324 {
7325 case IOR:
7326 if (op1 == AND)
7327 /* (a & b) | b == b */
7328 op0 = SET;
7329 else /* op1 == XOR */
7330 /* (a ^ b) | b == a | b */
7331 ;
7332 break;
7333
7334 case XOR:
7335 if (op1 == AND)
7336 /* (a & b) ^ b == (~a) & b */
7337 op0 = AND, *pcomp_p = 1;
7338 else /* op1 == IOR */
7339 /* (a | b) ^ b == a & ~b */
7340 op0 = AND, *pconst0 = ~ const0;
7341 break;
7342
7343 case AND:
7344 if (op1 == IOR)
7345 /* (a | b) & b == b */
7346 op0 = SET;
7347 else /* op1 == XOR */
7348 /* (a ^ b) & b) == (~a) & b */
7349 *pcomp_p = 1;
7350 break;
7351 }
7352
7353 /* Check for NO-OP cases. */
7354 const0 &= GET_MODE_MASK (mode);
7355 if (const0 == 0
7356 && (op0 == IOR || op0 == XOR || op0 == PLUS))
7357 op0 = NIL;
7358 else if (const0 == 0 && op0 == AND)
7359 op0 = SET;
7360 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
7361 op0 = NIL;
7362
7363 *pop0 = op0;
7364 *pconst0 = const0;
7365
7366 return 1;
7367}
7368\f
7369/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
7370 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
7371 that we started with.
7372
7373 The shift is normally computed in the widest mode we find in VAROP, as
7374 long as it isn't a different number of words than RESULT_MODE. Exceptions
7375 are ASHIFTRT and ROTATE, which are always done in their original mode, */
7376
7377static rtx
7378simplify_shift_const (x, code, result_mode, varop, count)
7379 rtx x;
7380 enum rtx_code code;
7381 enum machine_mode result_mode;
7382 rtx varop;
7383 int count;
7384{
7385 enum rtx_code orig_code = code;
7386 int orig_count = count;
7387 enum machine_mode mode = result_mode;
7388 enum machine_mode shift_mode, tmode;
7389 int mode_words
7390 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7391 /* We form (outer_op (code varop count) (outer_const)). */
7392 enum rtx_code outer_op = NIL;
c4e861e8 7393 HOST_WIDE_INT outer_const = 0;
230d793d
RS
7394 rtx const_rtx;
7395 int complement_p = 0;
7396 rtx new;
7397
7398 /* If we were given an invalid count, don't do anything except exactly
7399 what was requested. */
7400
7401 if (count < 0 || count > GET_MODE_BITSIZE (mode))
7402 {
7403 if (x)
7404 return x;
7405
5f4f0e22 7406 return gen_rtx (code, mode, varop, GEN_INT (count));
230d793d
RS
7407 }
7408
7409 /* Unless one of the branches of the `if' in this loop does a `continue',
7410 we will `break' the loop after the `if'. */
7411
7412 while (count != 0)
7413 {
7414 /* If we have an operand of (clobber (const_int 0)), just return that
7415 value. */
7416 if (GET_CODE (varop) == CLOBBER)
7417 return varop;
7418
7419 /* If we discovered we had to complement VAROP, leave. Making a NOT
7420 here would cause an infinite loop. */
7421 if (complement_p)
7422 break;
7423
7424 /* Convert ROTATETRT to ROTATE. */
7425 if (code == ROTATERT)
7426 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7427
7428 /* Canonicalize LSHIFT to ASHIFT. */
7429 if (code == LSHIFT)
7430 code = ASHIFT;
7431
7432 /* We need to determine what mode we will do the shift in. If the
7433 shift is a ASHIFTRT or ROTATE, we must always do it in the mode it
7434 was originally done in. Otherwise, we can do it in MODE, the widest
7435 mode encountered. */
7436 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7437
7438 /* Handle cases where the count is greater than the size of the mode
7439 minus 1. For ASHIFT, use the size minus one as the count (this can
7440 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
7441 take the count modulo the size. For other shifts, the result is
7442 zero.
7443
7444 Since these shifts are being produced by the compiler by combining
7445 multiple operations, each of which are defined, we know what the
7446 result is supposed to be. */
7447
7448 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
7449 {
7450 if (code == ASHIFTRT)
7451 count = GET_MODE_BITSIZE (shift_mode) - 1;
7452 else if (code == ROTATE || code == ROTATERT)
7453 count %= GET_MODE_BITSIZE (shift_mode);
7454 else
7455 {
7456 /* We can't simply return zero because there may be an
7457 outer op. */
7458 varop = const0_rtx;
7459 count = 0;
7460 break;
7461 }
7462 }
7463
7464 /* Negative counts are invalid and should not have been made (a
7465 programmer-specified negative count should have been handled
7466 above). */
7467 else if (count < 0)
7468 abort ();
7469
312def2e
RK
7470 /* An arithmetic right shift of a quantity known to be -1 or 0
7471 is a no-op. */
7472 if (code == ASHIFTRT
7473 && (num_sign_bit_copies (varop, shift_mode)
7474 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 7475 {
312def2e
RK
7476 count = 0;
7477 break;
7478 }
d0ab8cd3 7479
312def2e
RK
7480 /* If we are doing an arithmetic right shift and discarding all but
7481 the sign bit copies, this is equivalent to doing a shift by the
7482 bitsize minus one. Convert it into that shift because it will often
7483 allow other simplifications. */
500c518b 7484
312def2e
RK
7485 if (code == ASHIFTRT
7486 && (count + num_sign_bit_copies (varop, shift_mode)
7487 >= GET_MODE_BITSIZE (shift_mode)))
7488 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 7489
230d793d
RS
7490 /* We simplify the tests below and elsewhere by converting
7491 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7492 `make_compound_operation' will convert it to a ASHIFTRT for
7493 those machines (such as Vax) that don't have a LSHIFTRT. */
5f4f0e22 7494 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 7495 && code == ASHIFTRT
951553af 7496 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
7497 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7498 == 0))
230d793d
RS
7499 code = LSHIFTRT;
7500
7501 switch (GET_CODE (varop))
7502 {
7503 case SIGN_EXTEND:
7504 case ZERO_EXTEND:
7505 case SIGN_EXTRACT:
7506 case ZERO_EXTRACT:
7507 new = expand_compound_operation (varop);
7508 if (new != varop)
7509 {
7510 varop = new;
7511 continue;
7512 }
7513 break;
7514
7515 case MEM:
7516 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7517 minus the width of a smaller mode, we can do this with a
7518 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7519 if ((code == ASHIFTRT || code == LSHIFTRT)
7520 && ! mode_dependent_address_p (XEXP (varop, 0))
7521 && ! MEM_VOLATILE_P (varop)
7522 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7523 MODE_INT, 1)) != BLKmode)
7524 {
7525#if BYTES_BIG_ENDIAN
7526 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7527#else
7528 new = gen_rtx (MEM, tmode,
7529 plus_constant (XEXP (varop, 0),
7530 count / BITS_PER_UNIT));
7531 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7532 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7533 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7534#endif
7535 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7536 : ZERO_EXTEND, mode, new);
7537 count = 0;
7538 continue;
7539 }
7540 break;
7541
7542 case USE:
7543 /* Similar to the case above, except that we can only do this if
7544 the resulting mode is the same as that of the underlying
7545 MEM and adjust the address depending on the *bits* endianness
7546 because of the way that bit-field extract insns are defined. */
7547 if ((code == ASHIFTRT || code == LSHIFTRT)
7548 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7549 MODE_INT, 1)) != BLKmode
7550 && tmode == GET_MODE (XEXP (varop, 0)))
7551 {
7552#if BITS_BIG_ENDIAN
7553 new = XEXP (varop, 0);
7554#else
7555 new = copy_rtx (XEXP (varop, 0));
7556 SUBST (XEXP (new, 0),
7557 plus_constant (XEXP (new, 0),
7558 count / BITS_PER_UNIT));
7559#endif
7560
7561 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7562 : ZERO_EXTEND, mode, new);
7563 count = 0;
7564 continue;
7565 }
7566 break;
7567
7568 case SUBREG:
7569 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7570 the same number of words as what we've seen so far. Then store
7571 the widest mode in MODE. */
f9e67232
RS
7572 if (subreg_lowpart_p (varop)
7573 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7574 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
7575 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7576 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7577 == mode_words))
7578 {
7579 varop = SUBREG_REG (varop);
7580 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7581 mode = GET_MODE (varop);
7582 continue;
7583 }
7584 break;
7585
7586 case MULT:
7587 /* Some machines use MULT instead of ASHIFT because MULT
7588 is cheaper. But it is still better on those machines to
7589 merge two shifts into one. */
7590 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7591 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7592 {
7593 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 7594 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));;
230d793d
RS
7595 continue;
7596 }
7597 break;
7598
7599 case UDIV:
7600 /* Similar, for when divides are cheaper. */
7601 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7602 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7603 {
7604 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 7605 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
7606 continue;
7607 }
7608 break;
7609
7610 case ASHIFTRT:
7611 /* If we are extracting just the sign bit of an arithmetic right
7612 shift, that shift is not needed. */
7613 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7614 {
7615 varop = XEXP (varop, 0);
7616 continue;
7617 }
7618
7619 /* ... fall through ... */
7620
7621 case LSHIFTRT:
7622 case ASHIFT:
7623 case LSHIFT:
7624 case ROTATE:
7625 /* Here we have two nested shifts. The result is usually the
7626 AND of a new shift with a mask. We compute the result below. */
7627 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7628 && INTVAL (XEXP (varop, 1)) >= 0
7629 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
7630 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7631 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
7632 {
7633 enum rtx_code first_code = GET_CODE (varop);
7634 int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 7635 unsigned HOST_WIDE_INT mask;
230d793d
RS
7636 rtx mask_rtx;
7637 rtx inner;
7638
7639 if (first_code == LSHIFT)
7640 first_code = ASHIFT;
7641
7642 /* We have one common special case. We can't do any merging if
7643 the inner code is an ASHIFTRT of a smaller mode. However, if
7644 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7645 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7646 we can convert it to
7647 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
7648 This simplifies certain SIGN_EXTEND operations. */
7649 if (code == ASHIFT && first_code == ASHIFTRT
7650 && (GET_MODE_BITSIZE (result_mode)
7651 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7652 {
7653 /* C3 has the low-order C1 bits zero. */
7654
5f4f0e22
CH
7655 mask = (GET_MODE_MASK (mode)
7656 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 7657
5f4f0e22 7658 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 7659 XEXP (varop, 0), mask);
5f4f0e22 7660 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
7661 varop, count);
7662 count = first_count;
7663 code = ASHIFTRT;
7664 continue;
7665 }
7666
d0ab8cd3
RK
7667 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7668 than C1 high-order bits equal to the sign bit, we can convert
7669 this to either an ASHIFT or a ASHIFTRT depending on the
7670 two counts.
230d793d
RS
7671
7672 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7673
7674 if (code == ASHIFTRT && first_code == ASHIFT
7675 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
7676 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7677 > first_count))
230d793d 7678 {
d0ab8cd3
RK
7679 count -= first_count;
7680 if (count < 0)
7681 count = - count, code = ASHIFT;
7682 varop = XEXP (varop, 0);
7683 continue;
230d793d
RS
7684 }
7685
7686 /* There are some cases we can't do. If CODE is ASHIFTRT,
7687 we can only do this if FIRST_CODE is also ASHIFTRT.
7688
7689 We can't do the case when CODE is ROTATE and FIRST_CODE is
7690 ASHIFTRT.
7691
7692 If the mode of this shift is not the mode of the outer shift,
7693 we can't do this if either shift is ASHIFTRT or ROTATE.
7694
7695 Finally, we can't do any of these if the mode is too wide
7696 unless the codes are the same.
7697
7698 Handle the case where the shift codes are the same
7699 first. */
7700
7701 if (code == first_code)
7702 {
7703 if (GET_MODE (varop) != result_mode
7704 && (code == ASHIFTRT || code == ROTATE))
7705 break;
7706
7707 count += first_count;
7708 varop = XEXP (varop, 0);
7709 continue;
7710 }
7711
7712 if (code == ASHIFTRT
7713 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 7714 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d
RS
7715 || (GET_MODE (varop) != result_mode
7716 && (first_code == ASHIFTRT || first_code == ROTATE
7717 || code == ROTATE)))
7718 break;
7719
7720 /* To compute the mask to apply after the shift, shift the
951553af 7721 nonzero bits of the inner shift the same way the
230d793d
RS
7722 outer shift will. */
7723
951553af 7724 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
7725
7726 mask_rtx
7727 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 7728 GEN_INT (count));
230d793d
RS
7729
7730 /* Give up if we can't compute an outer operation to use. */
7731 if (mask_rtx == 0
7732 || GET_CODE (mask_rtx) != CONST_INT
7733 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7734 INTVAL (mask_rtx),
7735 result_mode, &complement_p))
7736 break;
7737
7738 /* If the shifts are in the same direction, we add the
7739 counts. Otherwise, we subtract them. */
7740 if ((code == ASHIFTRT || code == LSHIFTRT)
7741 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7742 count += first_count;
7743 else
7744 count -= first_count;
7745
7746 /* If COUNT is positive, the new shift is usually CODE,
7747 except for the two exceptions below, in which case it is
7748 FIRST_CODE. If the count is negative, FIRST_CODE should
7749 always be used */
7750 if (count > 0
7751 && ((first_code == ROTATE && code == ASHIFT)
7752 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7753 code = first_code;
7754 else if (count < 0)
7755 code = first_code, count = - count;
7756
7757 varop = XEXP (varop, 0);
7758 continue;
7759 }
7760
7761 /* If we have (A << B << C) for any shift, we can convert this to
7762 (A << C << B). This wins if A is a constant. Only try this if
7763 B is not a constant. */
7764
7765 else if (GET_CODE (varop) == code
7766 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7767 && 0 != (new
7768 = simplify_binary_operation (code, mode,
7769 XEXP (varop, 0),
5f4f0e22 7770 GEN_INT (count))))
230d793d
RS
7771 {
7772 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7773 count = 0;
7774 continue;
7775 }
7776 break;
7777
7778 case NOT:
7779 /* Make this fit the case below. */
7780 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 7781 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
7782 continue;
7783
7784 case IOR:
7785 case AND:
7786 case XOR:
7787 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7788 with C the size of VAROP - 1 and the shift is logical if
7789 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7790 we have an (le X 0) operation. If we have an arithmetic shift
7791 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7792 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7793
7794 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7795 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7796 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7797 && (code == LSHIFTRT || code == ASHIFTRT)
7798 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7799 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7800 {
7801 count = 0;
7802 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7803 const0_rtx);
7804
7805 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7806 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7807
7808 continue;
7809 }
7810
7811 /* If we have (shift (logical)), move the logical to the outside
7812 to allow it to possibly combine with another logical and the
7813 shift to combine with another shift. This also canonicalizes to
7814 what a ZERO_EXTRACT looks like. Also, some machines have
7815 (and (shift)) insns. */
7816
7817 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7818 && (new = simplify_binary_operation (code, result_mode,
7819 XEXP (varop, 1),
5f4f0e22 7820 GEN_INT (count))) != 0
230d793d
RS
7821 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7822 INTVAL (new), result_mode, &complement_p))
7823 {
7824 varop = XEXP (varop, 0);
7825 continue;
7826 }
7827
7828 /* If we can't do that, try to simplify the shift in each arm of the
7829 logical expression, make a new logical expression, and apply
7830 the inverse distributive law. */
7831 {
00d4ca1c 7832 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 7833 XEXP (varop, 0), count);
00d4ca1c 7834 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
7835 XEXP (varop, 1), count);
7836
00d4ca1c 7837 varop = gen_binary (GET_CODE (varop), GET_MODE (varop), lhs, rhs);
230d793d
RS
7838 varop = apply_distributive_law (varop);
7839
7840 count = 0;
7841 }
7842 break;
7843
7844 case EQ:
7845 /* convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7846 says that the sign bit can be tested, FOO has mode MODE, C is
7847 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
951553af 7848 may be nonzero. */
230d793d
RS
7849 if (code == LSHIFT
7850 && XEXP (varop, 1) == const0_rtx
7851 && GET_MODE (XEXP (varop, 0)) == result_mode
7852 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 7853 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 7854 && ((STORE_FLAG_VALUE
5f4f0e22 7855 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 7856 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
7857 && merge_outer_ops (&outer_op, &outer_const, XOR,
7858 (HOST_WIDE_INT) 1, result_mode,
7859 &complement_p))
230d793d
RS
7860 {
7861 varop = XEXP (varop, 0);
7862 count = 0;
7863 continue;
7864 }
7865 break;
7866
7867 case NEG:
d0ab8cd3
RK
7868 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7869 than the number of bits in the mode is equivalent to A. */
7870 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 7871 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 7872 {
d0ab8cd3 7873 varop = XEXP (varop, 0);
230d793d
RS
7874 count = 0;
7875 continue;
7876 }
7877
7878 /* NEG commutes with ASHIFT since it is multiplication. Move the
7879 NEG outside to allow shifts to combine. */
7880 if (code == ASHIFT
5f4f0e22
CH
7881 && merge_outer_ops (&outer_op, &outer_const, NEG,
7882 (HOST_WIDE_INT) 0, result_mode,
7883 &complement_p))
230d793d
RS
7884 {
7885 varop = XEXP (varop, 0);
7886 continue;
7887 }
7888 break;
7889
7890 case PLUS:
d0ab8cd3
RK
7891 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7892 is one less than the number of bits in the mode is
7893 equivalent to (xor A 1). */
230d793d
RS
7894 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7895 && XEXP (varop, 1) == constm1_rtx
951553af 7896 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
7897 && merge_outer_ops (&outer_op, &outer_const, XOR,
7898 (HOST_WIDE_INT) 1, result_mode,
7899 &complement_p))
230d793d
RS
7900 {
7901 count = 0;
7902 varop = XEXP (varop, 0);
7903 continue;
7904 }
7905
3f508eca 7906 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 7907 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
7908 bits are known zero in FOO, we can replace the PLUS with FOO.
7909 Similarly in the other operand order. This code occurs when
7910 we are computing the size of a variable-size array. */
7911
7912 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 7913 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
7914 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
7915 && (nonzero_bits (XEXP (varop, 1), result_mode)
7916 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
7917 {
7918 varop = XEXP (varop, 0);
7919 continue;
7920 }
7921 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 7922 && count < HOST_BITS_PER_WIDE_INT
ac49a949 7923 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 7924 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 7925 >> count)
951553af
RK
7926 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7927 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
7928 result_mode)))
7929 {
7930 varop = XEXP (varop, 1);
7931 continue;
7932 }
7933
230d793d
RS
7934 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7935 if (code == ASHIFT
7936 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7937 && (new = simplify_binary_operation (ASHIFT, result_mode,
7938 XEXP (varop, 1),
5f4f0e22 7939 GEN_INT (count))) != 0
230d793d
RS
7940 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7941 INTVAL (new), result_mode, &complement_p))
7942 {
7943 varop = XEXP (varop, 0);
7944 continue;
7945 }
7946 break;
7947
7948 case MINUS:
7949 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
7950 with C the size of VAROP - 1 and the shift is logical if
7951 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7952 we have a (gt X 0) operation. If the shift is arithmetic with
7953 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7954 we have a (neg (gt X 0)) operation. */
7955
7956 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7957 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7958 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7959 && (code == LSHIFTRT || code == ASHIFTRT)
7960 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7961 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7962 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7963 {
7964 count = 0;
7965 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7966 const0_rtx);
7967
7968 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7969 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7970
7971 continue;
7972 }
7973 break;
7974 }
7975
7976 break;
7977 }
7978
7979 /* We need to determine what mode to do the shift in. If the shift is
7980 a ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7981 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7982 The code we care about is that of the shift that will actually be done,
7983 not the shift that was originally requested. */
7984 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7985
7986 /* We have now finished analyzing the shift. The result should be
7987 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7988 OUTER_OP is non-NIL, it is an operation that needs to be applied
7989 to the result of the shift. OUTER_CONST is the relevant constant,
7990 but we must turn off all bits turned off in the shift.
7991
7992 If we were passed a value for X, see if we can use any pieces of
7993 it. If not, make new rtx. */
7994
7995 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7996 && GET_CODE (XEXP (x, 1)) == CONST_INT
7997 && INTVAL (XEXP (x, 1)) == count)
7998 const_rtx = XEXP (x, 1);
7999 else
5f4f0e22 8000 const_rtx = GEN_INT (count);
230d793d
RS
8001
8002 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8003 && GET_MODE (XEXP (x, 0)) == shift_mode
8004 && SUBREG_REG (XEXP (x, 0)) == varop)
8005 varop = XEXP (x, 0);
8006 else if (GET_MODE (varop) != shift_mode)
8007 varop = gen_lowpart_for_combine (shift_mode, varop);
8008
8009 /* If we can't make the SUBREG, try to return what we were given. */
8010 if (GET_CODE (varop) == CLOBBER)
8011 return x ? x : varop;
8012
8013 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8014 if (new != 0)
8015 x = new;
8016 else
8017 {
8018 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8019 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8020
8021 SUBST (XEXP (x, 0), varop);
8022 SUBST (XEXP (x, 1), const_rtx);
8023 }
8024
224eeff2
RK
8025 /* If we have an outer operation and we just made a shift, it is
8026 possible that we could have simplified the shift were it not
8027 for the outer operation. So try to do the simplification
8028 recursively. */
8029
8030 if (outer_op != NIL && GET_CODE (x) == code
8031 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8032 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8033 INTVAL (XEXP (x, 1)));
8034
230d793d
RS
8035 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
8036 turn off all the bits that the shift would have turned off. */
8037 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 8038 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d
RS
8039 GET_MODE_MASK (result_mode) >> orig_count);
8040
8041 /* Do the remainder of the processing in RESULT_MODE. */
8042 x = gen_lowpart_for_combine (result_mode, x);
8043
8044 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
8045 operation. */
8046 if (complement_p)
8047 x = gen_unary (NOT, result_mode, x);
8048
8049 if (outer_op != NIL)
8050 {
5f4f0e22 8051 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
8052 outer_const &= GET_MODE_MASK (result_mode);
8053
8054 if (outer_op == AND)
5f4f0e22 8055 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
8056 else if (outer_op == SET)
8057 /* This means that we have determined that the result is
8058 equivalent to a constant. This should be rare. */
5f4f0e22 8059 x = GEN_INT (outer_const);
230d793d
RS
8060 else if (GET_RTX_CLASS (outer_op) == '1')
8061 x = gen_unary (outer_op, result_mode, x);
8062 else
5f4f0e22 8063 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
8064 }
8065
8066 return x;
8067}
8068\f
8069/* Like recog, but we receive the address of a pointer to a new pattern.
8070 We try to match the rtx that the pointer points to.
8071 If that fails, we may try to modify or replace the pattern,
8072 storing the replacement into the same pointer object.
8073
8074 Modifications include deletion or addition of CLOBBERs.
8075
8076 PNOTES is a pointer to a location where any REG_UNUSED notes added for
8077 the CLOBBERs are placed.
8078
8079 The value is the final insn code from the pattern ultimately matched,
8080 or -1. */
8081
8082static int
8083recog_for_combine (pnewpat, insn, pnotes)
8084 rtx *pnewpat;
8085 rtx insn;
8086 rtx *pnotes;
8087{
8088 register rtx pat = *pnewpat;
8089 int insn_code_number;
8090 int num_clobbers_to_add = 0;
8091 int i;
8092 rtx notes = 0;
8093
974f4146
RK
8094 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
8095 we use to indicate that something didn't match. If we find such a
8096 thing, force rejection. */
d96023cf 8097 if (GET_CODE (pat) == PARALLEL)
974f4146 8098 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
d96023cf
RK
8099 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
8100 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
974f4146
RK
8101 return -1;
8102
230d793d
RS
8103 /* Is the result of combination a valid instruction? */
8104 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8105
8106 /* If it isn't, there is the possibility that we previously had an insn
8107 that clobbered some register as a side effect, but the combined
8108 insn doesn't need to do that. So try once more without the clobbers
8109 unless this represents an ASM insn. */
8110
8111 if (insn_code_number < 0 && ! check_asm_operands (pat)
8112 && GET_CODE (pat) == PARALLEL)
8113 {
8114 int pos;
8115
8116 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
8117 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
8118 {
8119 if (i != pos)
8120 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
8121 pos++;
8122 }
8123
8124 SUBST_INT (XVECLEN (pat, 0), pos);
8125
8126 if (pos == 1)
8127 pat = XVECEXP (pat, 0, 0);
8128
8129 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8130 }
8131
8132 /* If we had any clobbers to add, make a new pattern than contains
8133 them. Then check to make sure that all of them are dead. */
8134 if (num_clobbers_to_add)
8135 {
8136 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8137 gen_rtvec (GET_CODE (pat) == PARALLEL
8138 ? XVECLEN (pat, 0) + num_clobbers_to_add
8139 : num_clobbers_to_add + 1));
8140
8141 if (GET_CODE (pat) == PARALLEL)
8142 for (i = 0; i < XVECLEN (pat, 0); i++)
8143 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8144 else
8145 XVECEXP (newpat, 0, 0) = pat;
8146
8147 add_clobbers (newpat, insn_code_number);
8148
8149 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8150 i < XVECLEN (newpat, 0); i++)
8151 {
8152 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8153 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8154 return -1;
8155 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8156 XEXP (XVECEXP (newpat, 0, i), 0), notes);
8157 }
8158 pat = newpat;
8159 }
8160
8161 *pnewpat = pat;
8162 *pnotes = notes;
8163
8164 return insn_code_number;
8165}
8166\f
8167/* Like gen_lowpart but for use by combine. In combine it is not possible
8168 to create any new pseudoregs. However, it is safe to create
8169 invalid memory addresses, because combine will try to recognize
8170 them and all they will do is make the combine attempt fail.
8171
8172 If for some reason this cannot do its job, an rtx
8173 (clobber (const_int 0)) is returned.
8174 An insn containing that will not be recognized. */
8175
8176#undef gen_lowpart
8177
8178static rtx
8179gen_lowpart_for_combine (mode, x)
8180 enum machine_mode mode;
8181 register rtx x;
8182{
8183 rtx result;
8184
8185 if (GET_MODE (x) == mode)
8186 return x;
8187
eae957a8
RK
8188 /* We can only support MODE being wider than a word if X is a
8189 constant integer or has a mode the same size. */
8190
8191 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
8192 && ! ((GET_MODE (x) == VOIDmode
8193 && (GET_CODE (x) == CONST_INT
8194 || GET_CODE (x) == CONST_DOUBLE))
8195 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
230d793d
RS
8196 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8197
8198 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
8199 won't know what to do. So we will strip off the SUBREG here and
8200 process normally. */
8201 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8202 {
8203 x = SUBREG_REG (x);
8204 if (GET_MODE (x) == mode)
8205 return x;
8206 }
8207
8208 result = gen_lowpart_common (mode, x);
8209 if (result)
8210 return result;
8211
8212 if (GET_CODE (x) == MEM)
8213 {
8214 register int offset = 0;
8215 rtx new;
8216
8217 /* Refuse to work on a volatile memory ref or one with a mode-dependent
8218 address. */
8219 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8220 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8221
8222 /* If we want to refer to something bigger than the original memref,
8223 generate a perverse subreg instead. That will force a reload
8224 of the original memref X. */
8225 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8226 return gen_rtx (SUBREG, mode, x, 0);
8227
8228#if WORDS_BIG_ENDIAN
8229 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8230 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8231#endif
8232#if BYTES_BIG_ENDIAN
8233 /* Adjust the address so that the address-after-the-data
8234 is unchanged. */
8235 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8236 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
8237#endif
8238 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
8239 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8240 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8241 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8242 return new;
8243 }
8244
8245 /* If X is a comparison operator, rewrite it in a new mode. This
8246 probably won't match, but may allow further simplifications. */
8247 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
8248 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
8249
8250 /* If we couldn't simplify X any other way, just enclose it in a
8251 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 8252 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 8253 else
dfbe1b2f
RK
8254 {
8255 int word = 0;
8256
8257 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8258 word = ((GET_MODE_SIZE (GET_MODE (x))
8259 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8260 / UNITS_PER_WORD);
8261 return gen_rtx (SUBREG, mode, x, word);
8262 }
230d793d
RS
8263}
8264\f
8265/* Make an rtx expression. This is a subset of gen_rtx and only supports
8266 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8267
8268 If the identical expression was previously in the insn (in the undobuf),
8269 it will be returned. Only if it is not found will a new expression
8270 be made. */
8271
8272/*VARARGS2*/
8273static rtx
8274gen_rtx_combine (va_alist)
8275 va_dcl
8276{
8277 va_list p;
8278 enum rtx_code code;
8279 enum machine_mode mode;
8280 int n_args;
8281 rtx args[3];
8282 int i, j;
8283 char *fmt;
8284 rtx rt;
8285
8286 va_start (p);
8287 code = va_arg (p, enum rtx_code);
8288 mode = va_arg (p, enum machine_mode);
8289 n_args = GET_RTX_LENGTH (code);
8290 fmt = GET_RTX_FORMAT (code);
8291
8292 if (n_args == 0 || n_args > 3)
8293 abort ();
8294
8295 /* Get each arg and verify that it is supposed to be an expression. */
8296 for (j = 0; j < n_args; j++)
8297 {
8298 if (*fmt++ != 'e')
8299 abort ();
8300
8301 args[j] = va_arg (p, rtx);
8302 }
8303
8304 /* See if this is in undobuf. Be sure we don't use objects that came
8305 from another insn; this could produce circular rtl structures. */
8306
8307 for (i = previous_num_undos; i < undobuf.num_undo; i++)
8308 if (!undobuf.undo[i].is_int
f5393ab9
RS
8309 && GET_CODE (undobuf.undo[i].old_contents.r) == code
8310 && GET_MODE (undobuf.undo[i].old_contents.r) == mode)
230d793d
RS
8311 {
8312 for (j = 0; j < n_args; j++)
f5393ab9 8313 if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j])
230d793d
RS
8314 break;
8315
8316 if (j == n_args)
f5393ab9 8317 return undobuf.undo[i].old_contents.r;
230d793d
RS
8318 }
8319
8320 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
8321 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
8322 rt = rtx_alloc (code);
8323 PUT_MODE (rt, mode);
8324 XEXP (rt, 0) = args[0];
8325 if (n_args > 1)
8326 {
8327 XEXP (rt, 1) = args[1];
8328 if (n_args > 2)
8329 XEXP (rt, 2) = args[2];
8330 }
8331 return rt;
8332}
8333
8334/* These routines make binary and unary operations by first seeing if they
8335 fold; if not, a new expression is allocated. */
8336
8337static rtx
8338gen_binary (code, mode, op0, op1)
8339 enum rtx_code code;
8340 enum machine_mode mode;
8341 rtx op0, op1;
8342{
8343 rtx result;
1a26b032
RK
8344 rtx tem;
8345
8346 if (GET_RTX_CLASS (code) == 'c'
8347 && (GET_CODE (op0) == CONST_INT
8348 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8349 tem = op0, op0 = op1, op1 = tem;
230d793d
RS
8350
8351 if (GET_RTX_CLASS (code) == '<')
8352 {
8353 enum machine_mode op_mode = GET_MODE (op0);
8354 if (op_mode == VOIDmode)
8355 op_mode = GET_MODE (op1);
8356 result = simplify_relational_operation (code, op_mode, op0, op1);
8357 }
8358 else
8359 result = simplify_binary_operation (code, mode, op0, op1);
8360
8361 if (result)
8362 return result;
8363
8364 /* Put complex operands first and constants second. */
8365 if (GET_RTX_CLASS (code) == 'c'
8366 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8367 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
8368 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
8369 || (GET_CODE (op0) == SUBREG
8370 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
8371 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
8372 return gen_rtx_combine (code, mode, op1, op0);
8373
8374 return gen_rtx_combine (code, mode, op0, op1);
8375}
8376
8377static rtx
8378gen_unary (code, mode, op0)
8379 enum rtx_code code;
8380 enum machine_mode mode;
8381 rtx op0;
8382{
8383 rtx result = simplify_unary_operation (code, mode, op0, mode);
8384
8385 if (result)
8386 return result;
8387
8388 return gen_rtx_combine (code, mode, op0);
8389}
8390\f
8391/* Simplify a comparison between *POP0 and *POP1 where CODE is the
8392 comparison code that will be tested.
8393
8394 The result is a possibly different comparison code to use. *POP0 and
8395 *POP1 may be updated.
8396
8397 It is possible that we might detect that a comparison is either always
8398 true or always false. However, we do not perform general constant
5089e22e 8399 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
8400 should have been detected earlier. Hence we ignore all such cases. */
8401
8402static enum rtx_code
8403simplify_comparison (code, pop0, pop1)
8404 enum rtx_code code;
8405 rtx *pop0;
8406 rtx *pop1;
8407{
8408 rtx op0 = *pop0;
8409 rtx op1 = *pop1;
8410 rtx tem, tem1;
8411 int i;
8412 enum machine_mode mode, tmode;
8413
8414 /* Try a few ways of applying the same transformation to both operands. */
8415 while (1)
8416 {
8417 /* If both operands are the same constant shift, see if we can ignore the
8418 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 8419 this shift are known to be zero for both inputs and if the type of
230d793d
RS
8420 comparison is compatible with the shift. */
8421 if (GET_CODE (op0) == GET_CODE (op1)
5f4f0e22 8422 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
8423 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
8424 || ((GET_CODE (op0) == LSHIFTRT
8425 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
8426 && (code != GT && code != LT && code != GE && code != LE))
8427 || (GET_CODE (op0) == ASHIFTRT
8428 && (code != GTU && code != LTU
8429 && code != GEU && code != GEU)))
8430 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8431 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22 8432 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
230d793d
RS
8433 && XEXP (op0, 1) == XEXP (op1, 1))
8434 {
8435 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 8436 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
8437 int shift_count = INTVAL (XEXP (op0, 1));
8438
8439 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
8440 mask &= (mask >> shift_count) << shift_count;
8441 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
8442 mask = (mask & (mask << shift_count)) >> shift_count;
8443
951553af
RK
8444 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
8445 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
230d793d
RS
8446 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
8447 else
8448 break;
8449 }
8450
8451 /* If both operands are AND's of a paradoxical SUBREG by constant, the
8452 SUBREGs are of the same mode, and, in both cases, the AND would
8453 be redundant if the comparison was done in the narrower mode,
8454 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
8455 and the operand's possibly nonzero bits are 0xffffff01; in that case
8456 if we only care about QImode, we don't need the AND). This case
8457 occurs if the output mode of an scc insn is not SImode and
230d793d
RS
8458 STORE_FLAG_VALUE == 1 (e.g., the 386). */
8459
8460 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
8461 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8462 && GET_CODE (XEXP (op1, 1)) == CONST_INT
8463 && GET_CODE (XEXP (op0, 0)) == SUBREG
8464 && GET_CODE (XEXP (op1, 0)) == SUBREG
8465 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
8466 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
8467 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
8468 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
ac49a949
RS
8469 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
8470 <= HOST_BITS_PER_WIDE_INT)
951553af 8471 && (nonzero_bits (SUBREG_REG (XEXP (op0, 0)),
230d793d
RS
8472 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
8473 & ~ INTVAL (XEXP (op0, 1))) == 0
951553af 8474 && (nonzero_bits (SUBREG_REG (XEXP (op1, 0)),
230d793d
RS
8475 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
8476 & ~ INTVAL (XEXP (op1, 1))) == 0)
8477 {
8478 op0 = SUBREG_REG (XEXP (op0, 0));
8479 op1 = SUBREG_REG (XEXP (op1, 0));
8480
8481 /* the resulting comparison is always unsigned since we masked off
8482 the original sign bit. */
8483 code = unsigned_condition (code);
8484 }
8485 else
8486 break;
8487 }
8488
8489 /* If the first operand is a constant, swap the operands and adjust the
8490 comparison code appropriately. */
8491 if (CONSTANT_P (op0))
8492 {
8493 tem = op0, op0 = op1, op1 = tem;
8494 code = swap_condition (code);
8495 }
8496
8497 /* We now enter a loop during which we will try to simplify the comparison.
8498 For the most part, we only are concerned with comparisons with zero,
8499 but some things may really be comparisons with zero but not start
8500 out looking that way. */
8501
8502 while (GET_CODE (op1) == CONST_INT)
8503 {
8504 enum machine_mode mode = GET_MODE (op0);
8505 int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 8506 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
8507 int equality_comparison_p;
8508 int sign_bit_comparison_p;
8509 int unsigned_comparison_p;
5f4f0e22 8510 HOST_WIDE_INT const_op;
230d793d
RS
8511
8512 /* We only want to handle integral modes. This catches VOIDmode,
8513 CCmode, and the floating-point modes. An exception is that we
8514 can handle VOIDmode if OP0 is a COMPARE or a comparison
8515 operation. */
8516
8517 if (GET_MODE_CLASS (mode) != MODE_INT
8518 && ! (mode == VOIDmode
8519 && (GET_CODE (op0) == COMPARE
8520 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
8521 break;
8522
8523 /* Get the constant we are comparing against and turn off all bits
8524 not on in our mode. */
8525 const_op = INTVAL (op1);
5f4f0e22 8526 if (mode_width <= HOST_BITS_PER_WIDE_INT)
4803a34a 8527 const_op &= mask;
230d793d
RS
8528
8529 /* If we are comparing against a constant power of two and the value
951553af 8530 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
8531 `and'ed with that bit), we can replace this with a comparison
8532 with zero. */
8533 if (const_op
8534 && (code == EQ || code == NE || code == GE || code == GEU
8535 || code == LT || code == LTU)
5f4f0e22 8536 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 8537 && exact_log2 (const_op) >= 0
951553af 8538 && nonzero_bits (op0, mode) == const_op)
230d793d
RS
8539 {
8540 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
8541 op1 = const0_rtx, const_op = 0;
8542 }
8543
d0ab8cd3
RK
8544 /* Similarly, if we are comparing a value known to be either -1 or
8545 0 with -1, change it to the opposite comparison against zero. */
8546
8547 if (const_op == -1
8548 && (code == EQ || code == NE || code == GT || code == LE
8549 || code == GEU || code == LTU)
8550 && num_sign_bit_copies (op0, mode) == mode_width)
8551 {
8552 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
8553 op1 = const0_rtx, const_op = 0;
8554 }
8555
230d793d 8556 /* Do some canonicalizations based on the comparison code. We prefer
4803a34a
RK
8557 comparisons against zero and then prefer equality comparisons.
8558 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
8559
8560 switch (code)
8561 {
8562 case LT:
4803a34a
RK
8563 /* < C is equivalent to <= (C - 1) */
8564 if (const_op > 0)
230d793d 8565 {
4803a34a 8566 const_op -= 1;
5f4f0e22 8567 op1 = GEN_INT (const_op);
230d793d
RS
8568 code = LE;
8569 /* ... fall through to LE case below. */
8570 }
8571 else
8572 break;
8573
8574 case LE:
4803a34a
RK
8575 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
8576 if (const_op < 0)
8577 {
8578 const_op += 1;
5f4f0e22 8579 op1 = GEN_INT (const_op);
4803a34a
RK
8580 code = LT;
8581 }
230d793d
RS
8582
8583 /* If we are doing a <= 0 comparison on a value known to have
8584 a zero sign bit, we can replace this with == 0. */
8585 else if (const_op == 0
5f4f0e22 8586 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8587 && (nonzero_bits (op0, mode)
5f4f0e22 8588 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
8589 code = EQ;
8590 break;
8591
8592 case GE:
4803a34a
RK
8593 /* >= C is equivalent to > (C - 1). */
8594 if (const_op > 0)
230d793d 8595 {
4803a34a 8596 const_op -= 1;
5f4f0e22 8597 op1 = GEN_INT (const_op);
230d793d
RS
8598 code = GT;
8599 /* ... fall through to GT below. */
8600 }
8601 else
8602 break;
8603
8604 case GT:
4803a34a
RK
8605 /* > C is equivalent to >= (C + 1); we do this for C < 0*/
8606 if (const_op < 0)
8607 {
8608 const_op += 1;
5f4f0e22 8609 op1 = GEN_INT (const_op);
4803a34a
RK
8610 code = GE;
8611 }
230d793d
RS
8612
8613 /* If we are doing a > 0 comparison on a value known to have
8614 a zero sign bit, we can replace this with != 0. */
8615 else if (const_op == 0
5f4f0e22 8616 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8617 && (nonzero_bits (op0, mode)
5f4f0e22 8618 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
8619 code = NE;
8620 break;
8621
230d793d 8622 case LTU:
4803a34a
RK
8623 /* < C is equivalent to <= (C - 1). */
8624 if (const_op > 0)
8625 {
8626 const_op -= 1;
5f4f0e22 8627 op1 = GEN_INT (const_op);
4803a34a
RK
8628 code = LEU;
8629 /* ... fall through ... */
8630 }
d0ab8cd3
RK
8631
8632 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
8633 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8634 {
8635 const_op = 0, op1 = const0_rtx;
8636 code = GE;
8637 break;
8638 }
4803a34a
RK
8639 else
8640 break;
230d793d
RS
8641
8642 case LEU:
8643 /* unsigned <= 0 is equivalent to == 0 */
8644 if (const_op == 0)
8645 code = EQ;
d0ab8cd3
RK
8646
8647 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
8648 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8649 {
8650 const_op = 0, op1 = const0_rtx;
8651 code = GE;
8652 }
230d793d
RS
8653 break;
8654
4803a34a
RK
8655 case GEU:
8656 /* >= C is equivalent to < (C - 1). */
8657 if (const_op > 1)
8658 {
8659 const_op -= 1;
5f4f0e22 8660 op1 = GEN_INT (const_op);
4803a34a
RK
8661 code = GTU;
8662 /* ... fall through ... */
8663 }
d0ab8cd3
RK
8664
8665 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
8666 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8667 {
8668 const_op = 0, op1 = const0_rtx;
8669 code = LT;
8670 }
4803a34a
RK
8671 else
8672 break;
8673
230d793d
RS
8674 case GTU:
8675 /* unsigned > 0 is equivalent to != 0 */
8676 if (const_op == 0)
8677 code = NE;
d0ab8cd3
RK
8678
8679 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
8680 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8681 {
8682 const_op = 0, op1 = const0_rtx;
8683 code = LT;
8684 }
230d793d
RS
8685 break;
8686 }
8687
8688 /* Compute some predicates to simplify code below. */
8689
8690 equality_comparison_p = (code == EQ || code == NE);
8691 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
8692 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
8693 || code == LEU);
8694
6139ff20
RK
8695 /* If this is a sign bit comparison and we can do arithmetic in
8696 MODE, say that we will only be needing the sign bit of OP0. */
8697 if (sign_bit_comparison_p
8698 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8699 op0 = force_to_mode (op0, mode,
8700 ((HOST_WIDE_INT) 1
8701 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 8702 NULL_RTX, 0);
6139ff20 8703
230d793d
RS
8704 /* Now try cases based on the opcode of OP0. If none of the cases
8705 does a "continue", we exit this loop immediately after the
8706 switch. */
8707
8708 switch (GET_CODE (op0))
8709 {
8710 case ZERO_EXTRACT:
8711 /* If we are extracting a single bit from a variable position in
8712 a constant that has only a single bit set and are comparing it
8713 with zero, we can convert this into an equality comparison
8714 between the position and the location of the single bit. We can't
8715 do this if bit endian and we don't have an extzv since we then
8716 can't know what mode to use for the endianness adjustment. */
8717
8718#if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
8719 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
8720 && XEXP (op0, 1) == const1_rtx
8721 && equality_comparison_p && const_op == 0
8722 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
8723 {
8724#if BITS_BIG_ENDIAN
8725 i = (GET_MODE_BITSIZE
8726 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
8727#endif
8728
8729 op0 = XEXP (op0, 2);
5f4f0e22 8730 op1 = GEN_INT (i);
230d793d
RS
8731 const_op = i;
8732
8733 /* Result is nonzero iff shift count is equal to I. */
8734 code = reverse_condition (code);
8735 continue;
8736 }
8737#endif
8738
8739 /* ... fall through ... */
8740
8741 case SIGN_EXTRACT:
8742 tem = expand_compound_operation (op0);
8743 if (tem != op0)
8744 {
8745 op0 = tem;
8746 continue;
8747 }
8748 break;
8749
8750 case NOT:
8751 /* If testing for equality, we can take the NOT of the constant. */
8752 if (equality_comparison_p
8753 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
8754 {
8755 op0 = XEXP (op0, 0);
8756 op1 = tem;
8757 continue;
8758 }
8759
8760 /* If just looking at the sign bit, reverse the sense of the
8761 comparison. */
8762 if (sign_bit_comparison_p)
8763 {
8764 op0 = XEXP (op0, 0);
8765 code = (code == GE ? LT : GE);
8766 continue;
8767 }
8768 break;
8769
8770 case NEG:
8771 /* If testing for equality, we can take the NEG of the constant. */
8772 if (equality_comparison_p
8773 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
8774 {
8775 op0 = XEXP (op0, 0);
8776 op1 = tem;
8777 continue;
8778 }
8779
8780 /* The remaining cases only apply to comparisons with zero. */
8781 if (const_op != 0)
8782 break;
8783
8784 /* When X is ABS or is known positive,
8785 (neg X) is < 0 if and only if X != 0. */
8786
8787 if (sign_bit_comparison_p
8788 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 8789 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8790 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 8791 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
8792 {
8793 op0 = XEXP (op0, 0);
8794 code = (code == LT ? NE : EQ);
8795 continue;
8796 }
8797
3bed8141
RK
8798 /* If we have NEG of something whose two high-order bits are the
8799 same, we know that "(-a) < 0" is equivalent to "a > 0". */
8800 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
8801 {
8802 op0 = XEXP (op0, 0);
8803 code = swap_condition (code);
8804 continue;
8805 }
8806 break;
8807
8808 case ROTATE:
8809 /* If we are testing equality and our count is a constant, we
8810 can perform the inverse operation on our RHS. */
8811 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8812 && (tem = simplify_binary_operation (ROTATERT, mode,
8813 op1, XEXP (op0, 1))) != 0)
8814 {
8815 op0 = XEXP (op0, 0);
8816 op1 = tem;
8817 continue;
8818 }
8819
8820 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8821 a particular bit. Convert it to an AND of a constant of that
8822 bit. This will be converted into a ZERO_EXTRACT. */
8823 if (const_op == 0 && sign_bit_comparison_p
8824 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 8825 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 8826 {
5f4f0e22
CH
8827 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8828 ((HOST_WIDE_INT) 1
8829 << (mode_width - 1
8830 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
8831 code = (code == LT ? NE : EQ);
8832 continue;
8833 }
8834
8835 /* ... fall through ... */
8836
8837 case ABS:
8838 /* ABS is ignorable inside an equality comparison with zero. */
8839 if (const_op == 0 && equality_comparison_p)
8840 {
8841 op0 = XEXP (op0, 0);
8842 continue;
8843 }
8844 break;
8845
8846
8847 case SIGN_EXTEND:
8848 /* Can simplify (compare (zero/sign_extend FOO) CONST)
8849 to (compare FOO CONST) if CONST fits in FOO's mode and we
8850 are either testing inequality or have an unsigned comparison
8851 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
8852 if (! unsigned_comparison_p
8853 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
8854 <= HOST_BITS_PER_WIDE_INT)
8855 && ((unsigned HOST_WIDE_INT) const_op
8856 < (((HOST_WIDE_INT) 1
8857 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
8858 {
8859 op0 = XEXP (op0, 0);
8860 continue;
8861 }
8862 break;
8863
8864 case SUBREG:
a687e897
RK
8865 /* Check for the case where we are comparing A - C1 with C2,
8866 both constants are smaller than 1/2 the maxium positive
8867 value in MODE, and the comparison is equality or unsigned.
8868 In that case, if A is either zero-extended to MODE or has
8869 sufficient sign bits so that the high-order bit in MODE
8870 is a copy of the sign in the inner mode, we can prove that it is
8871 safe to do the operation in the wider mode. This simplifies
8872 many range checks. */
8873
8874 if (mode_width <= HOST_BITS_PER_WIDE_INT
8875 && subreg_lowpart_p (op0)
8876 && GET_CODE (SUBREG_REG (op0)) == PLUS
8877 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
8878 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
8879 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
8880 < GET_MODE_MASK (mode) / 2)
adb7a1cb 8881 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
8882 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
8883 GET_MODE (SUBREG_REG (op0)))
a687e897
RK
8884 & ~ GET_MODE_MASK (mode))
8885 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
8886 GET_MODE (SUBREG_REG (op0)))
8887 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8888 - GET_MODE_BITSIZE (mode)))))
8889 {
8890 op0 = SUBREG_REG (op0);
8891 continue;
8892 }
8893
fe0cf571
RK
8894 /* If the inner mode is narrower and we are extracting the low part,
8895 we can treat the SUBREG as if it were a ZERO_EXTEND. */
8896 if (subreg_lowpart_p (op0)
89f1c7f2
RS
8897 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
8898 /* Fall through */ ;
8899 else
230d793d
RS
8900 break;
8901
8902 /* ... fall through ... */
8903
8904 case ZERO_EXTEND:
8905 if ((unsigned_comparison_p || equality_comparison_p)
8906 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
8907 <= HOST_BITS_PER_WIDE_INT)
8908 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
8909 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
8910 {
8911 op0 = XEXP (op0, 0);
8912 continue;
8913 }
8914 break;
8915
8916 case PLUS:
20fdd649 8917 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 8918 this for equality comparisons due to pathological cases involving
230d793d 8919 overflows. */
20fdd649
RK
8920 if (equality_comparison_p
8921 && 0 != (tem = simplify_binary_operation (MINUS, mode,
8922 op1, XEXP (op0, 1))))
230d793d
RS
8923 {
8924 op0 = XEXP (op0, 0);
8925 op1 = tem;
8926 continue;
8927 }
8928
8929 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
8930 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
8931 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
8932 {
8933 op0 = XEXP (XEXP (op0, 0), 0);
8934 code = (code == LT ? EQ : NE);
8935 continue;
8936 }
8937 break;
8938
8939 case MINUS:
20fdd649
RK
8940 /* (eq (minus A B) C) -> (eq A (plus B C)) or
8941 (eq B (minus A C)), whichever simplifies. We can only do
8942 this for equality comparisons due to pathological cases involving
8943 overflows. */
8944 if (equality_comparison_p
8945 && 0 != (tem = simplify_binary_operation (PLUS, mode,
8946 XEXP (op0, 1), op1)))
8947 {
8948 op0 = XEXP (op0, 0);
8949 op1 = tem;
8950 continue;
8951 }
8952
8953 if (equality_comparison_p
8954 && 0 != (tem = simplify_binary_operation (MINUS, mode,
8955 XEXP (op0, 0), op1)))
8956 {
8957 op0 = XEXP (op0, 1);
8958 op1 = tem;
8959 continue;
8960 }
8961
230d793d
RS
8962 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
8963 of bits in X minus 1, is one iff X > 0. */
8964 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8965 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8966 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
8967 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8968 {
8969 op0 = XEXP (op0, 1);
8970 code = (code == GE ? LE : GT);
8971 continue;
8972 }
8973 break;
8974
8975 case XOR:
8976 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
8977 if C is zero or B is a constant. */
8978 if (equality_comparison_p
8979 && 0 != (tem = simplify_binary_operation (XOR, mode,
8980 XEXP (op0, 1), op1)))
8981 {
8982 op0 = XEXP (op0, 0);
8983 op1 = tem;
8984 continue;
8985 }
8986 break;
8987
8988 case EQ: case NE:
8989 case LT: case LTU: case LE: case LEU:
8990 case GT: case GTU: case GE: case GEU:
8991 /* We can't do anything if OP0 is a condition code value, rather
8992 than an actual data value. */
8993 if (const_op != 0
8994#ifdef HAVE_cc0
8995 || XEXP (op0, 0) == cc0_rtx
8996#endif
8997 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
8998 break;
8999
9000 /* Get the two operands being compared. */
9001 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
9002 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
9003 else
9004 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
9005
9006 /* Check for the cases where we simply want the result of the
9007 earlier test or the opposite of that result. */
9008 if (code == NE
9009 || (code == EQ && reversible_comparison_p (op0))
5f4f0e22 9010 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 9011 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 9012 && (STORE_FLAG_VALUE
5f4f0e22
CH
9013 & (((HOST_WIDE_INT) 1
9014 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
230d793d
RS
9015 && (code == LT
9016 || (code == GE && reversible_comparison_p (op0)))))
9017 {
9018 code = (code == LT || code == NE
9019 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
9020 op0 = tem, op1 = tem1;
9021 continue;
9022 }
9023 break;
9024
9025 case IOR:
9026 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
9027 iff X <= 0. */
9028 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
9029 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
9030 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9031 {
9032 op0 = XEXP (op0, 1);
9033 code = (code == GE ? GT : LE);
9034 continue;
9035 }
9036 break;
9037
9038 case AND:
9039 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
9040 will be converted to a ZERO_EXTRACT later. */
9041 if (const_op == 0 && equality_comparison_p
9042 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
9043 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
9044 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
9045 {
9046 op0 = simplify_and_const_int
9047 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
9048 XEXP (op0, 1),
9049 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 9050 (HOST_WIDE_INT) 1);
230d793d
RS
9051 continue;
9052 }
9053
9054 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
9055 zero and X is a comparison and C1 and C2 describe only bits set
9056 in STORE_FLAG_VALUE, we can compare with X. */
9057 if (const_op == 0 && equality_comparison_p
5f4f0e22 9058 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
9059 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9060 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
9061 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9062 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 9063 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
9064 {
9065 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9066 << INTVAL (XEXP (XEXP (op0, 0), 1)));
9067 if ((~ STORE_FLAG_VALUE & mask) == 0
9068 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
9069 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
9070 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
9071 {
9072 op0 = XEXP (XEXP (op0, 0), 0);
9073 continue;
9074 }
9075 }
9076
9077 /* If we are doing an equality comparison of an AND of a bit equal
9078 to the sign bit, replace this with a LT or GE comparison of
9079 the underlying value. */
9080 if (equality_comparison_p
9081 && const_op == 0
9082 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9083 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 9084 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
5f4f0e22 9085 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
9086 {
9087 op0 = XEXP (op0, 0);
9088 code = (code == EQ ? GE : LT);
9089 continue;
9090 }
9091
9092 /* If this AND operation is really a ZERO_EXTEND from a narrower
9093 mode, the constant fits within that mode, and this is either an
9094 equality or unsigned comparison, try to do this comparison in
9095 the narrower mode. */
9096 if ((equality_comparison_p || unsigned_comparison_p)
9097 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9098 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
9099 & GET_MODE_MASK (mode))
9100 + 1)) >= 0
9101 && const_op >> i == 0
9102 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
9103 {
9104 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
9105 continue;
9106 }
9107 break;
9108
9109 case ASHIFT:
9110 case LSHIFT:
9111 /* If we have (compare (xshift FOO N) (const_int C)) and
9112 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 9113 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
9114 shifted right N bits so long as the low-order N bits of C are
9115 zero. */
9116 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9117 && INTVAL (XEXP (op0, 1)) >= 0
9118 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
9119 < HOST_BITS_PER_WIDE_INT)
9120 && ((const_op
34785d05 9121 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 9122 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9123 && (nonzero_bits (XEXP (op0, 0), mode)
230d793d
RS
9124 & ~ (mask >> (INTVAL (XEXP (op0, 1))
9125 + ! equality_comparison_p))) == 0)
9126 {
9127 const_op >>= INTVAL (XEXP (op0, 1));
5f4f0e22 9128 op1 = GEN_INT (const_op);
230d793d
RS
9129 op0 = XEXP (op0, 0);
9130 continue;
9131 }
9132
dfbe1b2f 9133 /* If we are doing a sign bit comparison, it means we are testing
230d793d 9134 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 9135 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9136 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 9137 {
5f4f0e22
CH
9138 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9139 ((HOST_WIDE_INT) 1
9140 << (mode_width - 1
9141 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
9142 code = (code == LT ? NE : EQ);
9143 continue;
9144 }
dfbe1b2f
RK
9145
9146 /* If this an equality comparison with zero and we are shifting
9147 the low bit to the sign bit, we can convert this to an AND of the
9148 low-order bit. */
9149 if (const_op == 0 && equality_comparison_p
9150 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9151 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9152 {
5f4f0e22
CH
9153 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9154 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
9155 continue;
9156 }
230d793d
RS
9157 break;
9158
9159 case ASHIFTRT:
d0ab8cd3
RK
9160 /* If this is an equality comparison with zero, we can do this
9161 as a logical shift, which might be much simpler. */
9162 if (equality_comparison_p && const_op == 0
9163 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
9164 {
9165 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
9166 XEXP (op0, 0),
9167 INTVAL (XEXP (op0, 1)));
9168 continue;
9169 }
9170
230d793d
RS
9171 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
9172 do the comparison in a narrower mode. */
9173 if (! unsigned_comparison_p
9174 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9175 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9176 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
9177 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 9178 MODE_INT, 1)) != BLKmode
5f4f0e22
CH
9179 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
9180 || ((unsigned HOST_WIDE_INT) - const_op
9181 <= GET_MODE_MASK (tmode))))
230d793d
RS
9182 {
9183 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
9184 continue;
9185 }
9186
9187 /* ... fall through ... */
9188 case LSHIFTRT:
9189 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 9190 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
9191 by comparing FOO with C shifted left N bits so long as no
9192 overflow occurs. */
9193 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9194 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
9195 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9196 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9197 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 9198 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
9199 && (const_op == 0
9200 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
9201 < mode_width)))
9202 {
9203 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 9204 op1 = GEN_INT (const_op);
230d793d
RS
9205 op0 = XEXP (op0, 0);
9206 continue;
9207 }
9208
9209 /* If we are using this shift to extract just the sign bit, we
9210 can replace this with an LT or GE comparison. */
9211 if (const_op == 0
9212 && (equality_comparison_p || sign_bit_comparison_p)
9213 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9214 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9215 {
9216 op0 = XEXP (op0, 0);
9217 code = (code == NE || code == GT ? LT : GE);
9218 continue;
9219 }
9220 break;
9221 }
9222
9223 break;
9224 }
9225
9226 /* Now make any compound operations involved in this comparison. Then,
9227 check for an outmost SUBREG on OP0 that isn't doing anything or is
9228 paradoxical. The latter case can only occur when it is known that the
9229 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
9230 We can never remove a SUBREG for a non-equality comparison because the
9231 sign bit is in a different place in the underlying object. */
9232
9233 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
9234 op1 = make_compound_operation (op1, SET);
9235
9236 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9237 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9238 && (code == NE || code == EQ)
9239 && ((GET_MODE_SIZE (GET_MODE (op0))
9240 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
9241 {
9242 op0 = SUBREG_REG (op0);
9243 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
9244 }
9245
9246 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9247 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9248 && (code == NE || code == EQ)
ac49a949
RS
9249 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9250 <= HOST_BITS_PER_WIDE_INT)
951553af 9251 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
230d793d
RS
9252 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
9253 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
9254 op1),
951553af 9255 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
230d793d
RS
9256 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
9257 op0 = SUBREG_REG (op0), op1 = tem;
9258
9259 /* We now do the opposite procedure: Some machines don't have compare
9260 insns in all modes. If OP0's mode is an integer mode smaller than a
9261 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
9262 mode for which we can do the compare. There are a number of cases in
9263 which we can use the wider mode. */
230d793d
RS
9264
9265 mode = GET_MODE (op0);
9266 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
9267 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
9268 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
9269 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
9270 (tmode != VOIDmode
9271 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 9272 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 9273 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 9274 {
951553af 9275 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
9276 narrower mode and this is an equality or unsigned comparison,
9277 we can use the wider mode. Similarly for sign-extended
9278 values and equality or signed comparisons. */
9279 if (((code == EQ || code == NE
9280 || code == GEU || code == GTU || code == LEU || code == LTU)
951553af
RK
9281 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
9282 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
a687e897
RK
9283 || ((code == EQ || code == NE
9284 || code == GE || code == GT || code == LE || code == LT)
9285 && (num_sign_bit_copies (op0, tmode)
58744483 9286 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 9287 && (num_sign_bit_copies (op1, tmode)
58744483 9288 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897
RK
9289 {
9290 op0 = gen_lowpart_for_combine (tmode, op0);
9291 op1 = gen_lowpart_for_combine (tmode, op1);
9292 break;
9293 }
230d793d 9294
a687e897
RK
9295 /* If this is a test for negative, we can make an explicit
9296 test of the sign bit. */
9297
9298 if (op1 == const0_rtx && (code == LT || code == GE)
9299 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 9300 {
a687e897
RK
9301 op0 = gen_binary (AND, tmode,
9302 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
9303 GEN_INT ((HOST_WIDE_INT) 1
9304 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 9305 code = (code == LT) ? NE : EQ;
a687e897 9306 break;
230d793d 9307 }
230d793d
RS
9308 }
9309
9310 *pop0 = op0;
9311 *pop1 = op1;
9312
9313 return code;
9314}
9315\f
9316/* Return 1 if we know that X, a comparison operation, is not operating
9317 on a floating-point value or is EQ or NE, meaning that we can safely
9318 reverse it. */
9319
9320static int
9321reversible_comparison_p (x)
9322 rtx x;
9323{
9324 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e 9325 || flag_fast_math
230d793d
RS
9326 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
9327 return 1;
9328
9329 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
9330 {
9331 case MODE_INT:
3ad2180a
RK
9332 case MODE_PARTIAL_INT:
9333 case MODE_COMPLEX_INT:
230d793d
RS
9334 return 1;
9335
9336 case MODE_CC:
9337 x = get_last_value (XEXP (x, 0));
9338 return (x && GET_CODE (x) == COMPARE
3ad2180a 9339 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
230d793d
RS
9340 }
9341
9342 return 0;
9343}
9344\f
9345/* Utility function for following routine. Called when X is part of a value
9346 being stored into reg_last_set_value. Sets reg_last_set_table_tick
9347 for each register mentioned. Similar to mention_regs in cse.c */
9348
9349static void
9350update_table_tick (x)
9351 rtx x;
9352{
9353 register enum rtx_code code = GET_CODE (x);
9354 register char *fmt = GET_RTX_FORMAT (code);
9355 register int i;
9356
9357 if (code == REG)
9358 {
9359 int regno = REGNO (x);
9360 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9361 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9362
9363 for (i = regno; i < endregno; i++)
9364 reg_last_set_table_tick[i] = label_tick;
9365
9366 return;
9367 }
9368
9369 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9370 /* Note that we can't have an "E" in values stored; see
9371 get_last_value_validate. */
9372 if (fmt[i] == 'e')
9373 update_table_tick (XEXP (x, i));
9374}
9375
9376/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
9377 are saying that the register is clobbered and we no longer know its
7988fd36
RK
9378 value. If INSN is zero, don't update reg_last_set; this is only permitted
9379 with VALUE also zero and is used to invalidate the register. */
230d793d
RS
9380
9381static void
9382record_value_for_reg (reg, insn, value)
9383 rtx reg;
9384 rtx insn;
9385 rtx value;
9386{
9387 int regno = REGNO (reg);
9388 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9389 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
9390 int i;
9391
9392 /* If VALUE contains REG and we have a previous value for REG, substitute
9393 the previous value. */
9394 if (value && insn && reg_overlap_mentioned_p (reg, value))
9395 {
9396 rtx tem;
9397
9398 /* Set things up so get_last_value is allowed to see anything set up to
9399 our insn. */
9400 subst_low_cuid = INSN_CUID (insn);
9401 tem = get_last_value (reg);
9402
9403 if (tem)
9404 value = replace_rtx (copy_rtx (value), reg, tem);
9405 }
9406
9407 /* For each register modified, show we don't know its value, that
ef026f91
RS
9408 we don't know about its bitwise content, that its value has been
9409 updated, and that we don't know the location of the death of the
9410 register. */
230d793d
RS
9411 for (i = regno; i < endregno; i ++)
9412 {
9413 if (insn)
9414 reg_last_set[i] = insn;
9415 reg_last_set_value[i] = 0;
ef026f91
RS
9416 reg_last_set_mode[i] = 0;
9417 reg_last_set_nonzero_bits[i] = 0;
9418 reg_last_set_sign_bit_copies[i] = 0;
230d793d
RS
9419 reg_last_death[i] = 0;
9420 }
9421
9422 /* Mark registers that are being referenced in this value. */
9423 if (value)
9424 update_table_tick (value);
9425
9426 /* Now update the status of each register being set.
9427 If someone is using this register in this block, set this register
9428 to invalid since we will get confused between the two lives in this
9429 basic block. This makes using this register always invalid. In cse, we
9430 scan the table to invalidate all entries using this register, but this
9431 is too much work for us. */
9432
9433 for (i = regno; i < endregno; i++)
9434 {
9435 reg_last_set_label[i] = label_tick;
9436 if (value && reg_last_set_table_tick[i] == label_tick)
9437 reg_last_set_invalid[i] = 1;
9438 else
9439 reg_last_set_invalid[i] = 0;
9440 }
9441
9442 /* The value being assigned might refer to X (like in "x++;"). In that
9443 case, we must replace it with (clobber (const_int 0)) to prevent
9444 infinite loops. */
9445 if (value && ! get_last_value_validate (&value,
9446 reg_last_set_label[regno], 0))
9447 {
9448 value = copy_rtx (value);
9449 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
9450 value = 0;
9451 }
9452
55310dad
RK
9453 /* For the main register being modified, update the value, the mode, the
9454 nonzero bits, and the number of sign bit copies. */
9455
230d793d
RS
9456 reg_last_set_value[regno] = value;
9457
55310dad
RK
9458 if (value)
9459 {
2afabb48 9460 subst_low_cuid = INSN_CUID (insn);
55310dad
RK
9461 reg_last_set_mode[regno] = GET_MODE (reg);
9462 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
9463 reg_last_set_sign_bit_copies[regno]
9464 = num_sign_bit_copies (value, GET_MODE (reg));
9465 }
230d793d
RS
9466}
9467
9468/* Used for communication between the following two routines. */
9469static rtx record_dead_insn;
9470
9471/* Called via note_stores from record_dead_and_set_regs to handle one
9472 SET or CLOBBER in an insn. */
9473
9474static void
9475record_dead_and_set_regs_1 (dest, setter)
9476 rtx dest, setter;
9477{
9478 if (GET_CODE (dest) == REG)
9479 {
9480 /* If we are setting the whole register, we know its value. Otherwise
9481 show that we don't know the value. We can handle SUBREG in
9482 some cases. */
9483 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
9484 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
9485 else if (GET_CODE (setter) == SET
9486 && GET_CODE (SET_DEST (setter)) == SUBREG
9487 && SUBREG_REG (SET_DEST (setter)) == dest
9488 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
9489 record_value_for_reg (dest, record_dead_insn,
9490 gen_lowpart_for_combine (GET_MODE (dest),
9491 SET_SRC (setter)));
230d793d 9492 else
5f4f0e22 9493 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
9494 }
9495 else if (GET_CODE (dest) == MEM
9496 /* Ignore pushes, they clobber nothing. */
9497 && ! push_operand (dest, GET_MODE (dest)))
9498 mem_last_set = INSN_CUID (record_dead_insn);
9499}
9500
9501/* Update the records of when each REG was most recently set or killed
9502 for the things done by INSN. This is the last thing done in processing
9503 INSN in the combiner loop.
9504
ef026f91
RS
9505 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
9506 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
9507 and also the similar information mem_last_set (which insn most recently
9508 modified memory) and last_call_cuid (which insn was the most recent
9509 subroutine call). */
230d793d
RS
9510
9511static void
9512record_dead_and_set_regs (insn)
9513 rtx insn;
9514{
9515 register rtx link;
55310dad
RK
9516 int i;
9517
230d793d
RS
9518 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
9519 {
dbc131f3
RK
9520 if (REG_NOTE_KIND (link) == REG_DEAD
9521 && GET_CODE (XEXP (link, 0)) == REG)
9522 {
9523 int regno = REGNO (XEXP (link, 0));
9524 int endregno
9525 = regno + (regno < FIRST_PSEUDO_REGISTER
9526 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
9527 : 1);
dbc131f3
RK
9528
9529 for (i = regno; i < endregno; i++)
9530 reg_last_death[i] = insn;
9531 }
230d793d 9532 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 9533 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
9534 }
9535
9536 if (GET_CODE (insn) == CALL_INSN)
55310dad
RK
9537 {
9538 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9539 if (call_used_regs[i])
9540 {
9541 reg_last_set_value[i] = 0;
ef026f91
RS
9542 reg_last_set_mode[i] = 0;
9543 reg_last_set_nonzero_bits[i] = 0;
9544 reg_last_set_sign_bit_copies[i] = 0;
55310dad
RK
9545 reg_last_death[i] = 0;
9546 }
9547
9548 last_call_cuid = mem_last_set = INSN_CUID (insn);
9549 }
230d793d
RS
9550
9551 record_dead_insn = insn;
9552 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
9553}
9554\f
9555/* Utility routine for the following function. Verify that all the registers
9556 mentioned in *LOC are valid when *LOC was part of a value set when
9557 label_tick == TICK. Return 0 if some are not.
9558
9559 If REPLACE is non-zero, replace the invalid reference with
9560 (clobber (const_int 0)) and return 1. This replacement is useful because
9561 we often can get useful information about the form of a value (e.g., if
9562 it was produced by a shift that always produces -1 or 0) even though
9563 we don't know exactly what registers it was produced from. */
9564
9565static int
9566get_last_value_validate (loc, tick, replace)
9567 rtx *loc;
9568 int tick;
9569 int replace;
9570{
9571 rtx x = *loc;
9572 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9573 int len = GET_RTX_LENGTH (GET_CODE (x));
9574 int i;
9575
9576 if (GET_CODE (x) == REG)
9577 {
9578 int regno = REGNO (x);
9579 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9580 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9581 int j;
9582
9583 for (j = regno; j < endregno; j++)
9584 if (reg_last_set_invalid[j]
9585 /* If this is a pseudo-register that was only set once, it is
9586 always valid. */
9587 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
9588 && reg_last_set_label[j] > tick))
9589 {
9590 if (replace)
9591 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9592 return replace;
9593 }
9594
9595 return 1;
9596 }
9597
9598 for (i = 0; i < len; i++)
9599 if ((fmt[i] == 'e'
9600 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
9601 /* Don't bother with these. They shouldn't occur anyway. */
9602 || fmt[i] == 'E')
9603 return 0;
9604
9605 /* If we haven't found a reason for it to be invalid, it is valid. */
9606 return 1;
9607}
9608
9609/* Get the last value assigned to X, if known. Some registers
9610 in the value may be replaced with (clobber (const_int 0)) if their value
9611 is known longer known reliably. */
9612
9613static rtx
9614get_last_value (x)
9615 rtx x;
9616{
9617 int regno;
9618 rtx value;
9619
9620 /* If this is a non-paradoxical SUBREG, get the value of its operand and
9621 then convert it to the desired mode. If this is a paradoxical SUBREG,
9622 we cannot predict what values the "extra" bits might have. */
9623 if (GET_CODE (x) == SUBREG
9624 && subreg_lowpart_p (x)
9625 && (GET_MODE_SIZE (GET_MODE (x))
9626 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
9627 && (value = get_last_value (SUBREG_REG (x))) != 0)
9628 return gen_lowpart_for_combine (GET_MODE (x), value);
9629
9630 if (GET_CODE (x) != REG)
9631 return 0;
9632
9633 regno = REGNO (x);
9634 value = reg_last_set_value[regno];
9635
d0ab8cd3 9636 /* If we don't have a value or if it isn't for this basic block, return 0. */
230d793d
RS
9637
9638 if (value == 0
9639 || (reg_n_sets[regno] != 1
55310dad 9640 && reg_last_set_label[regno] != label_tick))
230d793d
RS
9641 return 0;
9642
d0ab8cd3 9643 /* If the value was set in a later insn that the ones we are processing,
4090a6b3
RK
9644 we can't use it even if the register was only set once, but make a quick
9645 check to see if the previous insn set it to something. This is commonly
9646 the case when the same pseudo is used by repeated insns. */
d0ab8cd3 9647
4090a6b3 9648 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
d0ab8cd3
RK
9649 {
9650 rtx insn, set;
9651
137e889e
RK
9652 /* If there is an insn that is supposed to be immediately
9653 in front of subst_insn, use it. */
9654 if (subst_prev_insn != 0)
9655 insn = subst_prev_insn;
9656 else
9657 for (insn = prev_nonnote_insn (subst_insn);
9658 insn && INSN_CUID (insn) >= subst_low_cuid;
9659 insn = prev_nonnote_insn (insn))
9660 ;
d0ab8cd3
RK
9661
9662 if (insn
9663 && (set = single_set (insn)) != 0
9664 && rtx_equal_p (SET_DEST (set), x))
9665 {
9666 value = SET_SRC (set);
9667
9668 /* Make sure that VALUE doesn't reference X. Replace any
9669 expliit references with a CLOBBER. If there are any remaining
9670 references (rare), don't use the value. */
9671
9672 if (reg_mentioned_p (x, value))
9673 value = replace_rtx (copy_rtx (value), x,
9674 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
9675
9676 if (reg_overlap_mentioned_p (x, value))
9677 return 0;
9678 }
9679 else
9680 return 0;
9681 }
9682
9683 /* If the value has all its registers valid, return it. */
230d793d
RS
9684 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
9685 return value;
9686
9687 /* Otherwise, make a copy and replace any invalid register with
9688 (clobber (const_int 0)). If that fails for some reason, return 0. */
9689
9690 value = copy_rtx (value);
9691 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
9692 return value;
9693
9694 return 0;
9695}
9696\f
9697/* Return nonzero if expression X refers to a REG or to memory
9698 that is set in an instruction more recent than FROM_CUID. */
9699
9700static int
9701use_crosses_set_p (x, from_cuid)
9702 register rtx x;
9703 int from_cuid;
9704{
9705 register char *fmt;
9706 register int i;
9707 register enum rtx_code code = GET_CODE (x);
9708
9709 if (code == REG)
9710 {
9711 register int regno = REGNO (x);
e28f5732
RK
9712 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
9713 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9714
230d793d
RS
9715#ifdef PUSH_ROUNDING
9716 /* Don't allow uses of the stack pointer to be moved,
9717 because we don't know whether the move crosses a push insn. */
9718 if (regno == STACK_POINTER_REGNUM)
9719 return 1;
9720#endif
e28f5732
RK
9721 for (;regno < endreg; regno++)
9722 if (reg_last_set[regno]
9723 && INSN_CUID (reg_last_set[regno]) > from_cuid)
9724 return 1;
9725 return 0;
230d793d
RS
9726 }
9727
9728 if (code == MEM && mem_last_set > from_cuid)
9729 return 1;
9730
9731 fmt = GET_RTX_FORMAT (code);
9732
9733 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9734 {
9735 if (fmt[i] == 'E')
9736 {
9737 register int j;
9738 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9739 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
9740 return 1;
9741 }
9742 else if (fmt[i] == 'e'
9743 && use_crosses_set_p (XEXP (x, i), from_cuid))
9744 return 1;
9745 }
9746 return 0;
9747}
9748\f
9749/* Define three variables used for communication between the following
9750 routines. */
9751
9752static int reg_dead_regno, reg_dead_endregno;
9753static int reg_dead_flag;
9754
9755/* Function called via note_stores from reg_dead_at_p.
9756
9757 If DEST is within [reg_dead_rengno, reg_dead_endregno), set
9758 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
9759
9760static void
9761reg_dead_at_p_1 (dest, x)
9762 rtx dest;
9763 rtx x;
9764{
9765 int regno, endregno;
9766
9767 if (GET_CODE (dest) != REG)
9768 return;
9769
9770 regno = REGNO (dest);
9771 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9772 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
9773
9774 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
9775 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
9776}
9777
9778/* Return non-zero if REG is known to be dead at INSN.
9779
9780 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
9781 referencing REG, it is dead. If we hit a SET referencing REG, it is
9782 live. Otherwise, see if it is live or dead at the start of the basic
9783 block we are in. */
9784
9785static int
9786reg_dead_at_p (reg, insn)
9787 rtx reg;
9788 rtx insn;
9789{
9790 int block, i;
9791
9792 /* Set variables for reg_dead_at_p_1. */
9793 reg_dead_regno = REGNO (reg);
9794 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
9795 ? HARD_REGNO_NREGS (reg_dead_regno,
9796 GET_MODE (reg))
9797 : 1);
9798
9799 reg_dead_flag = 0;
9800
9801 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
9802 beginning of function. */
9803 for (; insn && GET_CODE (insn) != CODE_LABEL;
9804 insn = prev_nonnote_insn (insn))
9805 {
9806 note_stores (PATTERN (insn), reg_dead_at_p_1);
9807 if (reg_dead_flag)
9808 return reg_dead_flag == 1 ? 1 : 0;
9809
9810 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
9811 return 1;
9812 }
9813
9814 /* Get the basic block number that we were in. */
9815 if (insn == 0)
9816 block = 0;
9817 else
9818 {
9819 for (block = 0; block < n_basic_blocks; block++)
9820 if (insn == basic_block_head[block])
9821 break;
9822
9823 if (block == n_basic_blocks)
9824 return 0;
9825 }
9826
9827 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
5f4f0e22
CH
9828 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
9829 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
230d793d
RS
9830 return 0;
9831
9832 return 1;
9833}
9834\f
9835/* Remove register number REGNO from the dead registers list of INSN.
9836
9837 Return the note used to record the death, if there was one. */
9838
9839rtx
9840remove_death (regno, insn)
9841 int regno;
9842 rtx insn;
9843{
9844 register rtx note = find_regno_note (insn, REG_DEAD, regno);
9845
9846 if (note)
1a26b032
RK
9847 {
9848 reg_n_deaths[regno]--;
9849 remove_note (insn, note);
9850 }
230d793d
RS
9851
9852 return note;
9853}
9854
9855/* For each register (hardware or pseudo) used within expression X, if its
9856 death is in an instruction with cuid between FROM_CUID (inclusive) and
9857 TO_INSN (exclusive), put a REG_DEAD note for that register in the
9858 list headed by PNOTES.
9859
9860 This is done when X is being merged by combination into TO_INSN. These
9861 notes will then be distributed as needed. */
9862
9863static void
9864move_deaths (x, from_cuid, to_insn, pnotes)
9865 rtx x;
9866 int from_cuid;
9867 rtx to_insn;
9868 rtx *pnotes;
9869{
9870 register char *fmt;
9871 register int len, i;
9872 register enum rtx_code code = GET_CODE (x);
9873
9874 if (code == REG)
9875 {
9876 register int regno = REGNO (x);
9877 register rtx where_dead = reg_last_death[regno];
9878
9879 if (where_dead && INSN_CUID (where_dead) >= from_cuid
9880 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
9881 {
dbc131f3 9882 rtx note = remove_death (regno, where_dead);
230d793d
RS
9883
9884 /* It is possible for the call above to return 0. This can occur
9885 when reg_last_death points to I2 or I1 that we combined with.
dbc131f3
RK
9886 In that case make a new note.
9887
9888 We must also check for the case where X is a hard register
9889 and NOTE is a death note for a range of hard registers
9890 including X. In that case, we must put REG_DEAD notes for
9891 the remaining registers in place of NOTE. */
9892
9893 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
9894 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
9895 != GET_MODE_SIZE (GET_MODE (x))))
9896 {
9897 int deadregno = REGNO (XEXP (note, 0));
9898 int deadend
9899 = (deadregno + HARD_REGNO_NREGS (deadregno,
9900 GET_MODE (XEXP (note, 0))));
9901 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9902 int i;
9903
9904 for (i = deadregno; i < deadend; i++)
9905 if (i < regno || i >= ourend)
9906 REG_NOTES (where_dead)
9907 = gen_rtx (EXPR_LIST, REG_DEAD,
9908 gen_rtx (REG, word_mode, i),
9909 REG_NOTES (where_dead));
9910 }
230d793d 9911
dbc131f3 9912 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
230d793d
RS
9913 {
9914 XEXP (note, 1) = *pnotes;
9915 *pnotes = note;
9916 }
9917 else
9918 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
1a26b032
RK
9919
9920 reg_n_deaths[regno]++;
230d793d
RS
9921 }
9922
9923 return;
9924 }
9925
9926 else if (GET_CODE (x) == SET)
9927 {
9928 rtx dest = SET_DEST (x);
9929
9930 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
9931
a7c99304
RK
9932 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
9933 that accesses one word of a multi-word item, some
9934 piece of everything register in the expression is used by
9935 this insn, so remove any old death. */
9936
9937 if (GET_CODE (dest) == ZERO_EXTRACT
9938 || GET_CODE (dest) == STRICT_LOW_PART
9939 || (GET_CODE (dest) == SUBREG
9940 && (((GET_MODE_SIZE (GET_MODE (dest))
9941 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
9942 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
9943 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
230d793d 9944 {
a7c99304
RK
9945 move_deaths (dest, from_cuid, to_insn, pnotes);
9946 return;
230d793d
RS
9947 }
9948
a7c99304
RK
9949 /* If this is some other SUBREG, we know it replaces the entire
9950 value, so use that as the destination. */
9951 if (GET_CODE (dest) == SUBREG)
9952 dest = SUBREG_REG (dest);
9953
9954 /* If this is a MEM, adjust deaths of anything used in the address.
9955 For a REG (the only other possibility), the entire value is
9956 being replaced so the old value is not used in this insn. */
230d793d
RS
9957
9958 if (GET_CODE (dest) == MEM)
9959 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
9960 return;
9961 }
9962
9963 else if (GET_CODE (x) == CLOBBER)
9964 return;
9965
9966 len = GET_RTX_LENGTH (code);
9967 fmt = GET_RTX_FORMAT (code);
9968
9969 for (i = 0; i < len; i++)
9970 {
9971 if (fmt[i] == 'E')
9972 {
9973 register int j;
9974 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9975 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
9976 }
9977 else if (fmt[i] == 'e')
9978 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
9979 }
9980}
9981\f
a7c99304
RK
9982/* Return 1 if X is the target of a bit-field assignment in BODY, the
9983 pattern of an insn. X must be a REG. */
230d793d
RS
9984
9985static int
a7c99304
RK
9986reg_bitfield_target_p (x, body)
9987 rtx x;
230d793d
RS
9988 rtx body;
9989{
9990 int i;
9991
9992 if (GET_CODE (body) == SET)
a7c99304
RK
9993 {
9994 rtx dest = SET_DEST (body);
9995 rtx target;
9996 int regno, tregno, endregno, endtregno;
9997
9998 if (GET_CODE (dest) == ZERO_EXTRACT)
9999 target = XEXP (dest, 0);
10000 else if (GET_CODE (dest) == STRICT_LOW_PART)
10001 target = SUBREG_REG (XEXP (dest, 0));
10002 else
10003 return 0;
10004
10005 if (GET_CODE (target) == SUBREG)
10006 target = SUBREG_REG (target);
10007
10008 if (GET_CODE (target) != REG)
10009 return 0;
10010
10011 tregno = REGNO (target), regno = REGNO (x);
10012 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
10013 return target == x;
10014
10015 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
10016 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10017
10018 return endregno > tregno && regno < endtregno;
10019 }
230d793d
RS
10020
10021 else if (GET_CODE (body) == PARALLEL)
10022 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 10023 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
10024 return 1;
10025
10026 return 0;
10027}
10028\f
10029/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
10030 as appropriate. I3 and I2 are the insns resulting from the combination
10031 insns including FROM (I2 may be zero).
10032
10033 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
10034 not need REG_DEAD notes because they are being substituted for. This
10035 saves searching in the most common cases.
10036
10037 Each note in the list is either ignored or placed on some insns, depending
10038 on the type of note. */
10039
10040static void
10041distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
10042 rtx notes;
10043 rtx from_insn;
10044 rtx i3, i2;
10045 rtx elim_i2, elim_i1;
10046{
10047 rtx note, next_note;
10048 rtx tem;
10049
10050 for (note = notes; note; note = next_note)
10051 {
10052 rtx place = 0, place2 = 0;
10053
10054 /* If this NOTE references a pseudo register, ensure it references
10055 the latest copy of that register. */
10056 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
10057 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
10058 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
10059
10060 next_note = XEXP (note, 1);
10061 switch (REG_NOTE_KIND (note))
10062 {
10063 case REG_UNUSED:
176c9e6b
JW
10064 /* If this note is from any insn other than i3, then we have no
10065 use for it, and must ignore it.
10066
10067 Any clobbers for i3 may still exist, and so we must process
10068 REG_UNUSED notes from that insn.
10069
10070 Any clobbers from i2 or i1 can only exist if they were added by
10071 recog_for_combine. In that case, recog_for_combine created the
10072 necessary REG_UNUSED notes. Trying to keep any original
10073 REG_UNUSED notes from these insns can cause incorrect output
10074 if it is for the same register as the original i3 dest.
10075 In that case, we will notice that the register is set in i3,
10076 and then add a REG_UNUSED note for the destination of i3, which
10077 is wrong. */
10078 if (from_insn != i3)
10079 break;
10080
230d793d
RS
10081 /* If this register is set or clobbered in I3, put the note there
10082 unless there is one already. */
176c9e6b 10083 else if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
230d793d
RS
10084 {
10085 if (! (GET_CODE (XEXP (note, 0)) == REG
10086 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
10087 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
10088 place = i3;
10089 }
10090 /* Otherwise, if this register is used by I3, then this register
10091 now dies here, so we must put a REG_DEAD note here unless there
10092 is one already. */
10093 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
10094 && ! (GET_CODE (XEXP (note, 0)) == REG
10095 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
10096 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
10097 {
10098 PUT_REG_NOTE_KIND (note, REG_DEAD);
10099 place = i3;
10100 }
10101 break;
10102
10103 case REG_EQUAL:
10104 case REG_EQUIV:
10105 case REG_NONNEG:
10106 /* These notes say something about results of an insn. We can
10107 only support them if they used to be on I3 in which case they
a687e897
RK
10108 remain on I3. Otherwise they are ignored.
10109
10110 If the note refers to an expression that is not a constant, we
10111 must also ignore the note since we cannot tell whether the
10112 equivalence is still true. It might be possible to do
10113 slightly better than this (we only have a problem if I2DEST
10114 or I1DEST is present in the expression), but it doesn't
10115 seem worth the trouble. */
10116
10117 if (from_insn == i3
10118 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
230d793d
RS
10119 place = i3;
10120 break;
10121
10122 case REG_INC:
10123 case REG_NO_CONFLICT:
10124 case REG_LABEL:
10125 /* These notes say something about how a register is used. They must
10126 be present on any use of the register in I2 or I3. */
10127 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
10128 place = i3;
10129
10130 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
10131 {
10132 if (place)
10133 place2 = i2;
10134 else
10135 place = i2;
10136 }
10137 break;
10138
10139 case REG_WAS_0:
10140 /* It is too much trouble to try to see if this note is still
10141 correct in all situations. It is better to simply delete it. */
10142 break;
10143
10144 case REG_RETVAL:
10145 /* If the insn previously containing this note still exists,
10146 put it back where it was. Otherwise move it to the previous
10147 insn. Adjust the corresponding REG_LIBCALL note. */
10148 if (GET_CODE (from_insn) != NOTE)
10149 place = from_insn;
10150 else
10151 {
5f4f0e22 10152 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
230d793d
RS
10153 place = prev_real_insn (from_insn);
10154 if (tem && place)
10155 XEXP (tem, 0) = place;
10156 }
10157 break;
10158
10159 case REG_LIBCALL:
10160 /* This is handled similarly to REG_RETVAL. */
10161 if (GET_CODE (from_insn) != NOTE)
10162 place = from_insn;
10163 else
10164 {
5f4f0e22 10165 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
230d793d
RS
10166 place = next_real_insn (from_insn);
10167 if (tem && place)
10168 XEXP (tem, 0) = place;
10169 }
10170 break;
10171
10172 case REG_DEAD:
10173 /* If the register is used as an input in I3, it dies there.
10174 Similarly for I2, if it is non-zero and adjacent to I3.
10175
10176 If the register is not used as an input in either I3 or I2
10177 and it is not one of the registers we were supposed to eliminate,
10178 there are two possibilities. We might have a non-adjacent I2
10179 or we might have somehow eliminated an additional register
10180 from a computation. For example, we might have had A & B where
10181 we discover that B will always be zero. In this case we will
10182 eliminate the reference to A.
10183
10184 In both cases, we must search to see if we can find a previous
10185 use of A and put the death note there. */
10186
10187 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
10188 place = i3;
10189 else if (i2 != 0 && next_nonnote_insn (i2) == i3
10190 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10191 place = i2;
10192
10193 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
10194 break;
10195
510dd77e
RK
10196 /* If the register is used in both I2 and I3 and it dies in I3,
10197 we might have added another reference to it. If reg_n_refs
10198 was 2, bump it to 3. This has to be correct since the
10199 register must have been set somewhere. The reason this is
10200 done is because local-alloc.c treats 2 references as a
10201 special case. */
10202
10203 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
10204 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
10205 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10206 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
10207
230d793d
RS
10208 if (place == 0)
10209 for (tem = prev_nonnote_insn (i3);
10210 tem && (GET_CODE (tem) == INSN
10211 || GET_CODE (tem) == CALL_INSN);
10212 tem = prev_nonnote_insn (tem))
10213 {
10214 /* If the register is being set at TEM, see if that is all
10215 TEM is doing. If so, delete TEM. Otherwise, make this
10216 into a REG_UNUSED note instead. */
10217 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
10218 {
10219 rtx set = single_set (tem);
10220
5089e22e
RS
10221 /* Verify that it was the set, and not a clobber that
10222 modified the register. */
10223
10224 if (set != 0 && ! side_effects_p (SET_SRC (set))
10225 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
230d793d
RS
10226 {
10227 /* Move the notes and links of TEM elsewhere.
10228 This might delete other dead insns recursively.
10229 First set the pattern to something that won't use
10230 any register. */
10231
10232 PATTERN (tem) = pc_rtx;
10233
5f4f0e22
CH
10234 distribute_notes (REG_NOTES (tem), tem, tem,
10235 NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
10236 distribute_links (LOG_LINKS (tem));
10237
10238 PUT_CODE (tem, NOTE);
10239 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
10240 NOTE_SOURCE_FILE (tem) = 0;
10241 }
10242 else
10243 {
10244 PUT_REG_NOTE_KIND (note, REG_UNUSED);
10245
10246 /* If there isn't already a REG_UNUSED note, put one
10247 here. */
10248 if (! find_regno_note (tem, REG_UNUSED,
10249 REGNO (XEXP (note, 0))))
10250 place = tem;
10251 break;
10252 }
10253 }
10254 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
10255 {
10256 place = tem;
10257 break;
10258 }
10259 }
10260
10261 /* If the register is set or already dead at PLACE, we needn't do
10262 anything with this note if it is still a REG_DEAD note.
10263
10264 Note that we cannot use just `dead_or_set_p' here since we can
10265 convert an assignment to a register into a bit-field assignment.
10266 Therefore, we must also omit the note if the register is the
10267 target of a bitfield assignment. */
10268
10269 if (place && REG_NOTE_KIND (note) == REG_DEAD)
10270 {
10271 int regno = REGNO (XEXP (note, 0));
10272
10273 if (dead_or_set_p (place, XEXP (note, 0))
10274 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
10275 {
10276 /* Unless the register previously died in PLACE, clear
10277 reg_last_death. [I no longer understand why this is
10278 being done.] */
10279 if (reg_last_death[regno] != place)
10280 reg_last_death[regno] = 0;
10281 place = 0;
10282 }
10283 else
10284 reg_last_death[regno] = place;
10285
10286 /* If this is a death note for a hard reg that is occupying
10287 multiple registers, ensure that we are still using all
10288 parts of the object. If we find a piece of the object
10289 that is unused, we must add a USE for that piece before
10290 PLACE and put the appropriate REG_DEAD note on it.
10291
10292 An alternative would be to put a REG_UNUSED for the pieces
10293 on the insn that set the register, but that can't be done if
10294 it is not in the same block. It is simpler, though less
10295 efficient, to add the USE insns. */
10296
10297 if (place && regno < FIRST_PSEUDO_REGISTER
10298 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
10299 {
10300 int endregno
10301 = regno + HARD_REGNO_NREGS (regno,
10302 GET_MODE (XEXP (note, 0)));
10303 int all_used = 1;
10304 int i;
10305
10306 for (i = regno; i < endregno; i++)
10307 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
10308 {
10309 rtx piece = gen_rtx (REG, word_mode, i);
28f6d3af
RK
10310 rtx p;
10311
10312 /* See if we already placed a USE note for this
10313 register in front of PLACE. */
10314 for (p = place;
10315 GET_CODE (PREV_INSN (p)) == INSN
10316 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
10317 p = PREV_INSN (p))
10318 if (rtx_equal_p (piece,
10319 XEXP (PATTERN (PREV_INSN (p)), 0)))
10320 {
10321 p = 0;
10322 break;
10323 }
10324
10325 if (p)
10326 {
10327 rtx use_insn
10328 = emit_insn_before (gen_rtx (USE, VOIDmode,
10329 piece),
10330 p);
10331 REG_NOTES (use_insn)
10332 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
10333 REG_NOTES (use_insn));
10334 }
230d793d 10335
5089e22e 10336 all_used = 0;
230d793d
RS
10337 }
10338
a394b17b
JW
10339 /* Check for the case where the register dying partially
10340 overlaps the register set by this insn. */
10341 if (all_used)
10342 for (i = regno; i < endregno; i++)
10343 if (dead_or_set_regno_p (place, i))
10344 {
10345 all_used = 0;
10346 break;
10347 }
10348
230d793d
RS
10349 if (! all_used)
10350 {
10351 /* Put only REG_DEAD notes for pieces that are
10352 still used and that are not already dead or set. */
10353
10354 for (i = regno; i < endregno; i++)
10355 {
10356 rtx piece = gen_rtx (REG, word_mode, i);
10357
10358 if (reg_referenced_p (piece, PATTERN (place))
10359 && ! dead_or_set_p (place, piece)
10360 && ! reg_bitfield_target_p (piece,
10361 PATTERN (place)))
10362 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
10363 piece,
10364 REG_NOTES (place));
10365 }
10366
10367 place = 0;
10368 }
10369 }
10370 }
10371 break;
10372
10373 default:
10374 /* Any other notes should not be present at this point in the
10375 compilation. */
10376 abort ();
10377 }
10378
10379 if (place)
10380 {
10381 XEXP (note, 1) = REG_NOTES (place);
10382 REG_NOTES (place) = note;
10383 }
1a26b032
RK
10384 else if ((REG_NOTE_KIND (note) == REG_DEAD
10385 || REG_NOTE_KIND (note) == REG_UNUSED)
10386 && GET_CODE (XEXP (note, 0)) == REG)
10387 reg_n_deaths[REGNO (XEXP (note, 0))]--;
230d793d
RS
10388
10389 if (place2)
1a26b032
RK
10390 {
10391 if ((REG_NOTE_KIND (note) == REG_DEAD
10392 || REG_NOTE_KIND (note) == REG_UNUSED)
10393 && GET_CODE (XEXP (note, 0)) == REG)
10394 reg_n_deaths[REGNO (XEXP (note, 0))]++;
10395
10396 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
10397 XEXP (note, 0), REG_NOTES (place2));
10398 }
230d793d
RS
10399 }
10400}
10401\f
10402/* Similarly to above, distribute the LOG_LINKS that used to be present on
5089e22e
RS
10403 I3, I2, and I1 to new locations. This is also called in one case to
10404 add a link pointing at I3 when I3's destination is changed. */
230d793d
RS
10405
10406static void
10407distribute_links (links)
10408 rtx links;
10409{
10410 rtx link, next_link;
10411
10412 for (link = links; link; link = next_link)
10413 {
10414 rtx place = 0;
10415 rtx insn;
10416 rtx set, reg;
10417
10418 next_link = XEXP (link, 1);
10419
10420 /* If the insn that this link points to is a NOTE or isn't a single
10421 set, ignore it. In the latter case, it isn't clear what we
10422 can do other than ignore the link, since we can't tell which
10423 register it was for. Such links wouldn't be used by combine
10424 anyway.
10425
10426 It is not possible for the destination of the target of the link to
10427 have been changed by combine. The only potential of this is if we
10428 replace I3, I2, and I1 by I3 and I2. But in that case the
10429 destination of I2 also remains unchanged. */
10430
10431 if (GET_CODE (XEXP (link, 0)) == NOTE
10432 || (set = single_set (XEXP (link, 0))) == 0)
10433 continue;
10434
10435 reg = SET_DEST (set);
10436 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
10437 || GET_CODE (reg) == SIGN_EXTRACT
10438 || GET_CODE (reg) == STRICT_LOW_PART)
10439 reg = XEXP (reg, 0);
10440
10441 /* A LOG_LINK is defined as being placed on the first insn that uses
10442 a register and points to the insn that sets the register. Start
10443 searching at the next insn after the target of the link and stop
10444 when we reach a set of the register or the end of the basic block.
10445
10446 Note that this correctly handles the link that used to point from
5089e22e 10447 I3 to I2. Also note that not much searching is typically done here
230d793d
RS
10448 since most links don't point very far away. */
10449
10450 for (insn = NEXT_INSN (XEXP (link, 0));
0d4d42c3
RK
10451 (insn && (this_basic_block == n_basic_blocks - 1
10452 || basic_block_head[this_basic_block + 1] != insn));
230d793d
RS
10453 insn = NEXT_INSN (insn))
10454 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
10455 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
10456 {
10457 if (reg_referenced_p (reg, PATTERN (insn)))
10458 place = insn;
10459 break;
10460 }
10461
10462 /* If we found a place to put the link, place it there unless there
10463 is already a link to the same insn as LINK at that point. */
10464
10465 if (place)
10466 {
10467 rtx link2;
10468
10469 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
10470 if (XEXP (link2, 0) == XEXP (link, 0))
10471 break;
10472
10473 if (link2 == 0)
10474 {
10475 XEXP (link, 1) = LOG_LINKS (place);
10476 LOG_LINKS (place) = link;
abe6e52f
RK
10477
10478 /* Set added_links_insn to the earliest insn we added a
10479 link to. */
10480 if (added_links_insn == 0
10481 || INSN_CUID (added_links_insn) > INSN_CUID (place))
10482 added_links_insn = place;
230d793d
RS
10483 }
10484 }
10485 }
10486}
10487\f
10488void
10489dump_combine_stats (file)
10490 FILE *file;
10491{
10492 fprintf
10493 (file,
10494 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
10495 combine_attempts, combine_merges, combine_extras, combine_successes);
10496}
10497
10498void
10499dump_combine_total_stats (file)
10500 FILE *file;
10501{
10502 fprintf
10503 (file,
10504 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
10505 total_attempts, total_merges, total_extras, total_successes);
10506}
This page took 1.444595 seconds and 5 git commands to generate.