/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0.  They don't
   need to, because the insn that sets CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

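/* An illustrative sketch of the transformation described above, with
   hypothetical register numbers.  Given LOG_LINKed insns

	(set (reg 100) (plus:SI (reg 99) (const_int 4)))
	(set (reg 101) (mult:SI (reg 100) (reg 98)))

   where (reg 100) dies in the second insn, combine substitutes the first
   SET's source into the second and proposes

	(set (reg 101) (mult:SI (plus:SI (reg 99) (const_int 4)) (reg 98)))

   The result is installed, and the earlier insn deleted, only if the new
   pattern is recognized (possibly after simplification and splitting) by
   the target's machine description.  */
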
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
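
/* Because cuids increase monotonically with insn position, the order of
   two insns can be tested directly; for example,

	if (INSN_CUID (a) < INSN_CUID (b))
	  ...

   holds exactly when A precedes B in the insn chain, however the uids
   were originally assigned.  (A and B are hypothetical insns.)  */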

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;
static int need_refresh;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

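/* A concrete (hypothetical) reading of the rules above: if pseudo 100 is
   set exactly once (reg_n_sets[100] == 1) to (plus:SI (reg 99)
   (const_int 4)), that expression is a valid value for register 100
   anywhere in the function; but it may be kept inside the recorded value
   of another register J only while register 99 itself is still valid
   there, i.e. while reg_last_set_label[99] < reg_last_set_label[J] or
   reg_last_set_invalid[J] is zero.  Otherwise (reg 99) is replaced by
   (clobber (const_int 0)) so the stale expression can never match.  */
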
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

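/* For example (hypothetical register number): if pseudo 100 is only ever
   set by zero-extending QImode loads, reg_nonzero_bits[100] is 0xff, so a
   later (and:SI (reg 100) (const_int 255)) is known to be redundant and
   can be simplified to just (reg 100).  */
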
/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.

   previous_undos is the value of undobuf.undos when we started processing
   this substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

struct undobuf
{
  char *storage;
  struct undo *undos;
  struct undo *frees;
  struct undo *previous_undos;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST			PROTO((rtx *, rtx));
static void do_SUBST_INT		PROTO((int *, int));
static void init_reg_last_arrays	PROTO((void));
static void setup_incoming_promotions	PROTO((void));
static void set_nonzero_bits_and_sign_copies  PROTO((rtx, rtx));
static int can_combine_p	PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p	PROTO((rtx));
static int combinable_i3pat	PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine		PROTO((rtx, rtx, rtx));
static void undo_all		PROTO((void));
static rtx *find_split_point	PROTO((rtx *, rtx));
static rtx subst		PROTO((rtx, rtx, rtx, int, int));
static rtx simplify_rtx		PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else  PROTO((rtx));
static rtx simplify_set		PROTO((rtx));
static rtx simplify_logical	PROTO((rtx, int));
static rtx expand_compound_operation  PROTO((rtx));
static rtx expand_field_assignment  PROTO((rtx));
static rtx make_extraction	PROTO((enum machine_mode, rtx, int, rtx, int,
				       int, int, int));
static rtx extract_left_shift	PROTO((rtx, int));
static rtx make_compound_operation  PROTO((rtx, enum rtx_code));
static int get_pos_from_mask	PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode	PROTO((rtx, enum machine_mode,
				       unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond	PROTO((rtx, rtx *, rtx *));
static rtx known_cond		PROTO((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
static rtx make_field_assignment  PROTO((rtx));
static rtx apply_distributive_law  PROTO((rtx));
static rtx simplify_and_const_int  PROTO((rtx, enum machine_mode, rtx,
					  unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies	PROTO((rtx, enum machine_mode));
static int merge_outer_ops	PROTO((enum rtx_code *, HOST_WIDE_INT *,
				       enum rtx_code, HOST_WIDE_INT,
				       enum machine_mode, int *));
static rtx simplify_shift_const	PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine	PROTO((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
				    ...));
static rtx gen_binary		PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx gen_unary		PROTO((enum rtx_code, enum machine_mode,
				       enum machine_mode, rtx));
static enum rtx_code simplify_comparison  PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p  PROTO((rtx));
static void update_table_tick	PROTO((rtx));
static void record_value_for_reg  PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1  PROTO((rtx, rtx));
static void record_dead_and_set_regs  PROTO((rtx));
static int get_last_value_validate  PROTO((rtx *, rtx, int, int));
static rtx get_last_value	PROTO((rtx));
static int use_crosses_set_p	PROTO((rtx, int));
static void reg_dead_at_p_1	PROTO((rtx, rtx));
static int reg_dead_at_p	PROTO((rtx, rtx));
static void move_deaths		PROTO((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PROTO((rtx, rtx));
static void distribute_notes	PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PROTO((rtx));
static void mark_used_regs_combine PROTO((rtx));
static int insn_cuid		PROTO((rtx));
\f
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (into, newval)
     rtx *into, newval;
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
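
/* A sketch of the intended use of SUBST (NEWSRC here is a hypothetical
   replacement expression): every replacement is recorded in undobuf.undos,
   so if the combined pattern later fails to match, undo_all can restore
   the original rtl:

	SUBST (SET_SRC (PATTERN (i3)), newsrc);
	...
	if (insn_code_number < 0)
	  undo_all ();
*/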

/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (into, newval)
     int *into, newval;
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)	do_SUBST_INT(&(INTO), (NEWVAL))
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.undos = undobuf.previous_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (refresh_blocks);
  need_refresh = 0;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && BLOCK_HEAD (this_basic_block + 1) == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  if (need_refresh)
    update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES);
  sbitmap_free (refresh_blocks);

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}
\f
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred ATTRIBUTE_UNUSED;
     rtx succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  int regno = REGNO (XEXP (elt, 0));
		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);
		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.

	 This is the same test done in can_combine except that we don't test
	 if SRC is a CALL operation to permit a hard register with
	 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
	 into account.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
		      || (SMALL_REGISTER_CLASSES
			  && ((! all_adjacent && ! REG_USERVAR_P (src))
			      || (FUNCTION_VALUE_REGNO_P (REGNO (src))
				  && ! REG_USERVAR_P (src))))))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	    && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	&& p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
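
/* An illustrative rejection (hypothetical register numbers): given

	insn:	(set (reg 100) (plus:SI (reg 99) (const_int 1)))
		(set (reg 99) (const_int 0))
	i3:	(set (reg 101) (reg 100))

   with the insns not all adjacent, substituting INSN's source into I3
   would read the new value of (reg 99); use_crosses_set_p detects this
   and can_combine_p returns 0.  */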
\f
/* Check if PAT is an insn - or a part of it - used to set up an
   argument for a function in a hard register.  */

static int
sets_function_arg_p (pat)
     rtx pat;
{
  int i;
  rtx inner_dest;

  switch (GET_CODE (pat))
    {
    case INSN:
      return sets_function_arg_p (PATTERN (pat));

    case PARALLEL:
      for (i = XVECLEN (pat, 0); --i >= 0;)
	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
	  return 1;

      break;

    case SET:
      inner_dest = SET_DEST (pat);
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      return (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));

    default:
      break;
    }

  return 0;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  Moreover, we can't test all_adjacent; we don't
	     have to, since this instruction will stay in place, thus we are
	     not considering increasing the lifetime of INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
		  || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
		      && ! REG_USERVAR_P (inner_dest)
		      && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
			  || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
			      && i3 != 0
			      && sets_function_arg_p (prev_nonnote_insn (i3)))))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
1345\f
1346/* Try to combine the insns I1 and I2 into I3.
1347 Here I1 and I2 appear earlier than I3.
1348 I1 can be zero; then we combine just I2 into I3.
1349
1350 It we are combining three insns and the resulting insn is not recognized,
1351 try splitting it into two insns. If that happens, I2 and I3 are retained
1352 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1353 are pseudo-deleted.
1354
abe6e52f
RK
1355 Return 0 if the combination does not work. Then nothing is changed.
1356 If we did the combination, return the insn at which combine should
1357 resume scanning. */
230d793d
RS
1358
1359static rtx
1360try_combine (i3, i2, i1)
1361 register rtx i3, i2, i1;
1362{
1363 /* New patterns for I3 and I3, respectively. */
1364 rtx newpat, newi2pat = 0;
1365 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1366 int added_sets_1, added_sets_2;
1367 /* Total number of SETs to put into I3. */
1368 int total_sets;
1369 /* Nonzero is I2's body now appears in I3. */
1370 int i2_is_used;
1371 /* INSN_CODEs for new I3, new I2, and user of condition code. */
6a651371 1372 int insn_code_number, i2_code_number = 0, other_code_number = 0;
230d793d
RS
1373 /* Contains I3 if the destination of I3 is used in its source, which means
1374 that the old life of I3 is being killed. If that usage is placed into
1375 I2 and not in I3, a REG_DEAD note must be made. */
1376 rtx i3dest_killed = 0;
1377 /* SET_DEST and SET_SRC of I2 and I1. */
1378 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1379 /* PATTERN (I2), or a copy of it in certain cases. */
1380 rtx i2pat;
1381 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
c4e861e8 1382 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
230d793d
RS
1383 int i1_feeds_i3 = 0;
1384 /* Notes that must be added to REG_NOTES in I3 and I2. */
1385 rtx new_i3_notes, new_i2_notes;
176c9e6b
JW
1386 /* Notes that we substituted I3 into I2 instead of the normal case. */
1387 int i3_subst_into_i2 = 0;
df7d75de
RK
1388 /* Notes that I1, I2 or I3 is a MULT operation. */
1389 int have_mult = 0;
230d793d
RS
1390
1391 int maxreg;
1392 rtx temp;
1393 register rtx link;
1394 int i;
1395
1396 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1397 This can occur when flow deletes an insn that it has merged into an
1398 auto-increment address. We also can't do anything if I3 has a
1399 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1400 libcall. */
1401
1402 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1403 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1404 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
ec35104c
JL
1405#if 0
1406 /* ??? This gives worse code, and appears to be unnecessary, since no
1407 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1408 || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1409#endif
1410)
230d793d
RS
1411 return 0;
1412
1413 combine_attempts++;
1414
241cea85 1415 undobuf.undos = undobuf.previous_undos = 0;
230d793d
RS
1416 undobuf.other_insn = 0;
1417
1418 /* Save the current high-water-mark so we can free storage if we didn't
1419 accept this combination. */
1420 undobuf.storage = (char *) oballoc (0);
1421
6e25d159
RK
1422 /* Reset the hard register usage information. */
1423 CLEAR_HARD_REG_SET (newpat_used_regs);
1424
230d793d
RS
1425 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1426 code below, set I1 to be the earlier of the two insns. */
1427 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1428 temp = i1, i1 = i2, i2 = temp;
1429
abe6e52f 1430 added_links_insn = 0;
137e889e 1431
230d793d
RS
1432 /* First check for one important special-case that the code below will
1433 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1434 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1435 we may be able to replace that destination with the destination of I3.
1436 This occurs in the common code where we compute both a quotient and
1437 remainder into a structure, in which case we want to do the computation
1438 directly into the structure to avoid register-register copies.
1439
1440 We make very conservative checks below and only try to handle the
1441 most common cases of this. For example, we only handle the case
1442 where I2 and I3 are adjacent to avoid making difficult register
1443 usage tests. */

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
      && (! SMALL_REGISTER_CLASSES
          || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
              || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
              || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
         below would need to check what is inside (and reg_overlap_mentioned_p
         doesn't support those codes anyway).  Don't allow those destinations;
         the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
                                    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
         which we are going to substitute into one output of I2,
         is not used within another output of I2.  We must avoid making this:
         (parallel [(set (mem (reg 69)) ...)
                    (set (reg 69) ...)])
         which is not well-defined as to order of actions.
         (Besides, reload can't handle output reloads for this.)

         The problem can also happen if the dest of I3 is a memory ref,
         if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
        if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
             || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
            && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
                                        SET_DEST (XVECEXP (p2, 0, i))))
          break;

      if (i == XVECLEN (p2, 0))
        for (i = 0; i < XVECLEN (p2, 0); i++)
          if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
            {
              combine_merges++;

              subst_insn = i3;
              subst_low_cuid = INSN_CUID (i2);

              added_sets_2 = added_sets_1 = 0;
              i2dest = SET_SRC (PATTERN (i3));

              /* Replace the dest in I2 with our dest and make the resulting
                 insn the new pattern for I3.  Then skip to where we
                 validate the pattern.  Everything was set up above.  */
              SUBST (SET_DEST (XVECEXP (p2, 0, i)),
                     SET_DEST (PATTERN (i3)));

              newpat = p2;
              i3_subst_into_i2 = 1;
              goto validate_replacement;
            }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
        (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
                   (set Y OP)])
     make up a dummy I1 that is
        (set Y OP)
     and change I2 to be
        (set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
          == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
                      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
        if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
          break;

      if (i == 1)
        {
          /* We make I1 with the same INSN_UID as I2.  This gives it
             the same INSN_CUID for value tracking.  Our fake I1 will
             never appear in the insn stream so giving it the same INSN_UID
             as I2 will not cause a problem.  */

          subst_prev_insn = i1
            = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
                            XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
                            NULL_RTX);

          SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
          SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
                 SET_DEST (PATTERN (i1)));
        }
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
                          i1 && i2dest_in_i1src && i1_feeds_i3,
                          &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
          && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
        mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
        && GET_CODE (SET_SRC (PATTERN (i3))) == REG
        && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
        && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
            || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
          && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
              || (i1 != 0
                  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
        {
          undo_all ();
          return 0;
        }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
               : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
           ? gen_rtx_SET (VOIDmode, i2dest, i2src)
           : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
         The cases that we are interested in here do not involve the few
         cases where is_replaced is checked.  */
      if (i1)
        {
          subst_low_cuid = INSN_CUID (i1);
          i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
        }
      else
        {
          subst_low_cuid = INSN_CUID (i2);
          i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
        }

      undobuf.previous_undos = undobuf.undos;
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */
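  /* For instance (illustrative only), with

        I2: (set (reg 62) (plus:SI (reg 60) (reg 61)))
        I3: (set (reg:CC 17) (compare:CC (reg 62) (const_int 0)))

     and (reg 62) still needed after I3, we substitute I2SRC into the
     COMPARE and later build

        (parallel [(set (reg:CC 17) (compare:CC (plus:SI (reg 60) (reg 61))
                                                (const_int 0)))
                   (set (reg 62) (plus:SI (reg 60) (reg 61)))])  */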

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
#ifdef EXTRA_CC_MODES
      rtx *cc_use;
      enum machine_mode compare_mode;
#endif

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
         with the mode that is currently being used.  If not, do the same
         processing we do in `subst' for a SET; namely, if the destination
         is used only once, try to replace it with a register of the proper
         mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
          && (cc_use = find_single_use (SET_DEST (newpat), i3,
                                        &undobuf.other_insn))
          && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
                                              i2src, const0_rtx))
              != GET_MODE (SET_DEST (newpat))))
        {
          int regno = REGNO (SET_DEST (newpat));
          rtx new_dest = gen_rtx_REG (compare_mode, regno);

          if (regno < FIRST_PSEUDO_REGISTER
              || (REG_N_SETS (regno) == 1 && ! added_sets_2
                  && ! REG_USERVAR_P (SET_DEST (newpat))))
            {
              if (regno >= FIRST_PSEUDO_REGISTER)
                SUBST (regno_reg_rtx[regno], new_dest);

              SUBST (SET_DEST (newpat), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              SUBST (SET_SRC (newpat),
                     gen_rtx_combine (COMPARE, compare_mode,
                                      i2src, const0_rtx));
            }
          else
            undobuf.other_insn = 0;
        }
#endif
    }
  else
#endif
    {
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
         need to make a unique copy of I2SRC each time we substitute it
         to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
                      ! i1_feeds_i3 && i1dest_in_i1src);
      undobuf.previous_undos = undobuf.undos;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
         above (see detailed comments there) that ensures that I1DEST
         isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
                              0, NULL_PTR))
        {
          undo_all ();
          return 0;
        }

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      undobuf.previous_undos = undobuf.undos;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
          && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
              > 1))
      /* Fail if we tried to make a new register (we used to abort, but there's
         really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
         at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
          && ! have_mult))
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
        {
          rtvec old = XVEC (newpat, 0);
          total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
                 sizeof (old->elem[0]) * old->num_elem);
        }
      else
        {
          rtx old = newpat;
          total_sets = 1 + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          XVECEXP (newpat, 0, 0) = old;
        }

      if (added_sets_1)
        XVECEXP (newpat, 0, --total_sets)
          = (GET_CODE (PATTERN (i1)) == PARALLEL
             ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
        {
          /* If there is no I1, use I2's body as is.  We used to also not do
             the subst call below if I2 was substituted into I3,
             but that could lose a simplification.  */
          if (i1 == 0)
            XVECEXP (newpat, 0, --total_sets) = i2pat;
          else
            /* See comment where i2pat is assigned.  */
            XVECEXP (newpat, 0, --total_sets)
              = subst (i2pat, i1dest, i1src, 0, 0);
        }
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.  Also check the case where the first
     SET's destination is unused.  That would not cause incorrect code, but
     does cause an unneeded insn to remain.  */
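  /* For example (illustrative only), if NEWPAT is

        (parallel [(set (reg 70) (div:SI (reg 68) (reg 69)))
                   (set (reg 71) (mod:SI (reg 68) (reg 69)))])

     and I3 has a REG_UNUSED note for (reg 71), only the DIV set is kept.  */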

  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
      && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 0);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
           && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
           && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
           && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 1);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx m_split, *split;
      rtx ni2dest = i2dest;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
         use I2DEST as a scratch register will help.  In the latter case,
         convert I2DEST to the mode of the source of NEWPAT if we can.  */
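      /* The scratch-register attempt below wraps NEWPAT as
            (parallel [NEWPAT
                       (clobber NI2DEST)])
         so that a machine-specific split that needs a scratch register
         can match.  */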

      m_split = split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
         inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
         possible to try that as a scratch reg.  This would require adding
         more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
        {
          /* If I2DEST is a hard register or the only use of a pseudo,
             we can change its mode.  */
          if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
              && GET_MODE (SET_DEST (newpat)) != VOIDmode
              && GET_CODE (i2dest) == REG
              && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
                  || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
                      && ! REG_USERVAR_P (i2dest))))
            ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
                                   REGNO (i2dest));

          m_split = split_insns (gen_rtx_PARALLEL
                                 (VOIDmode,
                                  gen_rtvec (2, newpat,
                                             gen_rtx_CLOBBER (VOIDmode,
                                                              ni2dest))),
                                 i3);
        }

      if (m_split && GET_CODE (m_split) == SEQUENCE
          && XVECLEN (m_split, 0) == 2
          && (next_real_insn (i2) == i3
              || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
                                      INSN_CUID (i2))))
        {
          rtx i2set, i3set;
          rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
          newi2pat = PATTERN (XVECEXP (m_split, 0, 0));

          i3set = single_set (XVECEXP (m_split, 0, 1));
          i2set = single_set (XVECEXP (m_split, 0, 0));

          /* In case we changed the mode of I2DEST, replace it in the
             pseudo-register table here.  We can't do it above in case this
             code doesn't get executed and we do a split the other way.  */

          if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
            SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);

          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* If I2 or I3 has multiple SETs, we won't know how to track
             register status, so don't use these insns.  If I2's destination
             is used between I2 and I3, we also can't use these insns.  */

          if (i2_code_number >= 0 && i2set && i3set
              && (next_real_insn (i2) == i3
                  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
            insn_code_number = recog_for_combine (&newi3pat, i3,
                                                  &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = newi3pat;

          /* It is possible that both insns now set the destination of I3.
             If so, we must show an extra use of it.  */

          if (insn_code_number >= 0)
            {
              rtx new_i3_dest = SET_DEST (i3set);
              rtx new_i2_dest = SET_DEST (i2set);

              while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i3_dest) == SUBREG)
                new_i3_dest = XEXP (new_i3_dest, 0);

              while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i2_dest) == SUBREG)
                new_i2_dest = XEXP (new_i2_dest, 0);

              if (GET_CODE (new_i3_dest) == REG
                  && GET_CODE (new_i2_dest) == REG
                  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
                REG_N_SETS (REGNO (new_i2_dest))++;
            }
        }

      /* If we can split it and use I2DEST, go ahead and see if that
         helps things be recognized.  Verify that none of the registers
         are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
          && GET_CODE (i2dest) == REG
#endif
          /* We need I2DEST in the proper mode.  If it is a hard register
             or the only use of a pseudo, we can change its mode.  */
          && (GET_MODE (*split) == GET_MODE (i2dest)
              || GET_MODE (*split) == VOIDmode
              || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
              || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
                  && ! REG_USERVAR_P (i2dest)))
          && (next_real_insn (i2) == i3
              || ! use_crosses_set_p (*split, INSN_CUID (i2)))
          /* We can't overwrite I2DEST if its value is still used by
             NEWPAT.  */
          && ! reg_referenced_p (i2dest, newpat))
        {
          rtx newdest = i2dest;
          enum rtx_code split_code = GET_CODE (*split);
          enum machine_mode split_mode = GET_MODE (*split);

          /* Get NEWDEST as a register in the proper mode.  We have already
             validated that we can do this.  */
          if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
            {
              newdest = gen_rtx_REG (split_mode, REGNO (i2dest));

              if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
                SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
            }

          /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
             an ASHIFT.  This can occur if it was inside a PLUS and hence
             appeared to be a memory address.  This is a kludge.  */
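          /* For example, (mult FOO (const_int 4)) becomes
             (ashift FOO (const_int 2)).  */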
          if (split_code == MULT
              && GET_CODE (XEXP (*split, 1)) == CONST_INT
              && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
            {
              SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
                                              XEXP (*split, 0), GEN_INT (i)));
              /* Update split_code because we may not have a multiply
                 anymore.  */
              split_code = GET_CODE (*split);
            }

#ifdef INSN_SCHEDULING
          /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
             be written as a ZERO_EXTEND.  */
          if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
            SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
                                            XEXP (*split, 0)));
#endif

          newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
          SUBST (*split, newdest);
          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* If the split point was a MULT and we didn't have one before,
             don't use one now.  */
          if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
            insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }
    }

  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */
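  /* For example (illustrative only; the subreg numbering assumes a
     little-endian target), the PARALLEL

        (parallel [(set (reg 70) (sign_extend:SI (mem:QI (reg 65))))
                   (set (reg 71) (mem:QI (reg 65)))])

     becomes (set (reg 70) (sign_extend:SI (mem:QI (reg 65)))) as the new
     I2 and the copy (set (reg 71) (subreg:QI (reg 70) 0)) as the new I3.  */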

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                           XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   INSN_CUID (i2))
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
                 (GET_CODE (temp) == REG
                  && reg_nonzero_bits[REGNO (temp)] != 0
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                  && (reg_nonzero_bits[REGNO (temp)]
                      != GET_MODE_MASK (word_mode))))
           && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
                 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
                     (GET_CODE (temp) == REG
                      && reg_nonzero_bits[REGNO (temp)] != 0
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                      && (reg_nonzero_bits[REGNO (temp)]
                          != GET_MODE_MASK (word_mode)))))
           && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                         SET_SRC (XVECEXP (newpat, 0, 1)))
           && ! find_reg_note (i3, REG_UNUSED,
                               SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;

      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
             gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

      if (insn_code_number >= 0)
        {
          rtx insn;
          rtx link;

          /* If we will be able to accept this, we have made a change to the
             destination of I3.  This can invalidate a LOG_LINKS entry
             pointing to I3.  No other part of combine.c makes such a
             transformation.

             The new I3 will have a destination that was previously the
             destination of I1 or I2 and which was used in I2 or I3.  Call
             distribute_links to make a LOG_LINK from the next use of
             that destination.  */

          PATTERN (i3) = newpat;
          distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));

          /* I3 now uses what used to be its destination and which is
             now I2's destination.  That means we need a LOG_LINK from
             I3 to I2.  But we used to have one, so we still will.

             However, some later insn might be using I2's dest and have
             a LOG_LINK pointing at I3.  We must remove this link.
             The simplest way to remove the link is to point it at I1,
             which we know will be a NOTE.  */

          for (insn = NEXT_INSN (i3);
               insn && (this_basic_block == n_basic_blocks - 1
                        || insn != BLOCK_HEAD (this_basic_block + 1));
               insn = NEXT_INSN (insn))
            {
              if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
                  && reg_referenced_p (ni2dest, PATTERN (insn)))
                {
                  for (link = LOG_LINKS (insn); link;
                       link = XEXP (link, 1))
                    if (XEXP (link, 0) == i3)
                      XEXP (link, 0) = i1;

                  break;
                }
            }
        }
    }

  /* Similarly, check for a case where we have a PARALLEL of two independent
     SETs but we started with three insns.  In this case, we can do the sets
     as two separate insns.  This case occurs when some SET allows two
     other insns to combine, but the destination of that SET is still live.  */
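  /* For example (illustrative only), if NEWPAT is

        (parallel [(set (reg 70) (plus:SI (reg 68) (reg 69)))
                   (set (reg 71) (minus:SI (reg 68) (reg 69)))])

     and the two sets are independent, the second set normally becomes the
     new I2 and the first the new I3.  */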

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   INSN_CUID (i2))
           /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                  XVECEXP (newpat, 0, 0))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
                                  XVECEXP (newpat, 0, 1)))
    {
      /* Normally, it doesn't matter which of the two is done first,
         but it does if one references cc0.  In that case, it has to
         be first.  */
#ifdef HAVE_cc0
      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
        {
          newi2pat = XVECEXP (newpat, 0, 0);
          newpat = XVECEXP (newpat, 0, 1);
        }
      else
#endif
        {
          newi2pat = XVECEXP (newpat, 0, 1);
          newpat = XVECEXP (newpat, 0, 0);
        }

      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }

  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      rtx other_pat = PATTERN (undobuf.other_insn);
      rtx new_other_notes;
      rtx note, next;

      CLEAR_HARD_REG_SET (newpat_used_regs);

      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
                                             &new_other_notes);

      if (other_code_number < 0 && ! check_asm_operands (other_pat))
        {
          undo_all ();
          return 0;
        }

      PATTERN (undobuf.other_insn) = other_pat;

      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
         are still valid.  Then add any non-duplicate notes added by
         recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
        {
          next = XEXP (note, 1);

          if (REG_NOTE_KIND (note) == REG_UNUSED
              && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
            {
              if (GET_CODE (XEXP (note, 0)) == REG)
                REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

              remove_note (undobuf.other_insn, note);
            }
        }

      for (note = new_other_notes; note; note = XEXP (note, 1))
        if (GET_CODE (XEXP (note, 0)) == REG)
          REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

      distribute_notes (new_other_notes, undobuf.other_insn,
                        undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
    }

  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */

  {
    rtx i3notes, i2notes, i1notes = 0;
    rtx i3links, i2links, i1links = 0;
    rtx midnotes = 0;
    register int regno;
    /* Compute which registers we expect to eliminate.  newi2pat may be
       setting either i3dest or i2dest, so we must check it.  Also, i1dest
       may be the same as i3dest, in which case newi2pat may be setting
       i1dest.  */
    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
                   || i2dest_in_i2src || i2dest_in_i1src
                   ? 0 : i2dest);
    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
                   || (newi2pat && reg_set_p (i1dest, newi2pat))
                   ? 0 : i1dest);

    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);

    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */

    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;
    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;

    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.

       Also, in this case, we must pretend that all REG_NOTEs for I2
       actually came from I3, so that REG_UNUSED notes from I2 will be
       properly handled.  */

    if (i3_subst_into_i2)
      {
        for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
          if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
              && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
              && ! find_reg_note (i2, REG_UNUSED,
                                  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
            for (temp = NEXT_INSN (i2);
                 temp && (this_basic_block == n_basic_blocks - 1
                          || BLOCK_HEAD (this_basic_block) != temp);
                 temp = NEXT_INSN (temp))
              if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
                for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
                  if (XEXP (link, 0) == i2)
                    XEXP (link, 0) = i3;

        if (i3notes)
          {
            rtx link = i3notes;
            while (XEXP (link, 1))
              link = XEXP (link, 1);
            XEXP (link, 1) = i2notes;
          }
        else
          i3notes = i2notes;
        i2notes = 0;
      }

    LOG_LINKS (i3) = 0;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = 0;
    REG_NOTES (i2) = 0;

    if (newi2pat)
      {
        INSN_CODE (i2) = i2_code_number;
        PATTERN (i2) = newi2pat;
      }
    else
      {
        PUT_CODE (i2, NOTE);
        NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
        NOTE_SOURCE_FILE (i2) = 0;
      }

    if (i1)
      {
        LOG_LINKS (i1) = 0;
        REG_NOTES (i1) = 0;
        PUT_CODE (i1, NOTE);
        NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
        NOTE_SOURCE_FILE (i1) = 0;
      }

    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  If we built two new
       patterns, move from I1 to I2 then I2 to I3 so that we get the
       proper movement on registers that I2 modifies.  */

    if (newi2pat)
      {
        move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
        move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
      }
    else
      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
                   i3, &midnotes);

    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);

    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  We have not counted the notes in
       reg_n_deaths yet, so we need to do so now.  */

    if (newi2pat && new_i2_notes)
      {
        for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
          if (GET_CODE (XEXP (temp, 0)) == REG)
            REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;

        distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    if (new_i3_notes)
      {
        for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
          if (GET_CODE (XEXP (temp, 0)) == REG)
            REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;

        distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
       in that case, it might delete I2.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */

    if (i3dest_killed)
      {
        if (GET_CODE (i3dest_killed) == REG)
          REG_N_DEATHS (REGNO (i3dest_killed))++;

        if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
                                               NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
                                               NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            elim_i2, elim_i1);
      }

    if (i2dest_in_i2src)
      {
        if (GET_CODE (i2dest) == REG)
          REG_N_DEATHS (REGNO (i2dest))++;

        if (newi2pat && reg_set_p (i2dest, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }

    if (i1dest_in_i1src)
      {
        if (GET_CODE (i1dest) == REG)
          REG_N_DEATHS (REGNO (i1dest))++;

        if (newi2pat && reg_set_p (i1dest, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }

    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);

    if (GET_CODE (i2dest) == REG)
      {
        rtx link;
        rtx i2_insn = 0, i2_val = 0, set;

        /* The insn that used to set this register doesn't exist, and
           this life of the register may not exist either.  See if one of
           I3's links points to an insn that sets I2DEST.  If it does,
           that is now the last known value for I2DEST.  If we don't update
           this and I2 set the register to a value that depended on its old
           contents, we will get confused.  If this insn is used, things
           will be set correctly in combine_instructions.  */

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i2dest, SET_DEST (set)))
            i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);

        record_value_for_reg (i2dest, i2_insn, i2_val);

        /* If the reg formerly set in I2 died only once and that was in I3,
           zero its use count so it won't make `reload' do any work.  */
        if (! added_sets_2
            && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
            && ! i2dest_in_i2src)
          {
            regno = REGNO (i2dest);
            REG_N_SETS (regno)--;
            if (REG_N_SETS (regno) == 0
                && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
                                      regno))
              REG_N_REFS (regno) = 0;
          }
      }

    if (i1 && GET_CODE (i1dest) == REG)
      {
        rtx link;
        rtx i1_insn = 0, i1_val = 0, set;

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i1dest, SET_DEST (set)))
            i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);

        record_value_for_reg (i1dest, i1_insn, i1_val);

        regno = REGNO (i1dest);
        if (! added_sets_1 && ! i1dest_in_i1src)
          {
            REG_N_SETS (regno)--;
            if (REG_N_SETS (regno) == 0
                && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
                                      regno))
              REG_N_REFS (regno) = 0;
          }
      }

    /* Update reg_nonzero_bits et al for any changes that may have been made
       to this insn.  */

    note_stores (newpat, set_nonzero_bits_and_sign_copies);
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies);

    /* If I3 is now an unconditional jump, ensure that it has a
       BARRIER following it since it may have initially been a
       conditional jump.  It may also be the last nonnote insn.  */

    if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
        && ((temp = next_nonnote_insn (i3)) == NULL_RTX
            || GET_CODE (temp) != BARRIER))
      emit_barrier_after (i3);
  }

  combine_successes++;

  /* Clear this here, so that subsequent get_last_value calls are not
     affected.  */
  subst_prev_insn = NULL_RTX;

  if (added_links_insn
      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
    return added_links_insn;
  else
    return newi2pat ? i2 : i3;
}
\f
/* Undo all the modifications recorded in undobuf.  */

static void
undo_all ()
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      if (undo->is_int)
        *undo->where.i = undo->old_contents.i;
      else
        *undo->where.r = undo->old_contents.r;

      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }

  obfree (undobuf.storage);
  undobuf.undos = undobuf.previous_undos = 0;

  /* Clear this here, so that subsequent get_last_value calls are not
     affected.  */
  subst_prev_insn = NULL_RTX;
}
\f
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  */

static rtx *
find_split_point (loc, insn)
     rtx *loc;
     rtx insn;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  int len = 0, pos = 0, unsignedp = 0;
  rtx inner = NULL_RTX;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
         point.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM)
        return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
         using LO_SUM and HIGH.  */
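      /* E.g. (mem (symbol_ref "foo")) becomes
         (mem (lo_sum (high (symbol_ref "foo")) (symbol_ref "foo")))
         and the HIGH subexpression is returned as the split point.  */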
      if (GET_CODE (XEXP (x, 0)) == CONST
          || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
        {
          SUBST (XEXP (x, 0),
                 gen_rtx_combine (LO_SUM, Pmode,
                                  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
                                  XEXP (x, 0)));
          return &XEXP (XEXP (x, 0), 0);
        }
#endif

      /* If we have a PLUS whose second operand is a constant and the
         address is not valid, perhaps we can split it up using
         the machine-specific way to split large constants.  We use
         the first pseudo-reg (one of the virtual regs) as a placeholder;
         it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        {
          rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
          rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
                                 subst_insn);

          /* This should have produced two insns, each of which sets our
             placeholder.  If the source of the second is a valid address,
             we can put both sources together and make a split point
             in the middle.  */

          if (seq && XVECLEN (seq, 0) == 2
              && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
              && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
              && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
              && ! reg_mentioned_p (reg,
                                    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
              && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
              && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
              && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
              && memory_address_p (GET_MODE (x),
                                   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
            {
              rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
              rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));

              /* Replace the placeholder in SRC2 with SRC1.  If we can
                 find where in SRC2 it was placed, that can become our
                 split point and we can replace this address with SRC2.
                 Just try two obvious places.  */

              src2 = replace_rtx (src2, reg, src1);
              split = 0;
              if (XEXP (src2, 0) == src1)
                split = &XEXP (src2, 0);
              else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
                       && XEXP (XEXP (src2, 0), 0) == src1)
                split = &XEXP (XEXP (src2, 0), 0);

              if (split)
                {
                  SUBST (XEXP (x, 0), src2);
                  return split;
                }
            }

          /* If that didn't work, perhaps the first operand is complex and
             needs to be computed separately, so make a split point there.
             This will occur on machines that just support REG + CONST
             and have a constant moved through some previous computation.  */

          else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
                   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
                         && (GET_RTX_CLASS (GET_CODE (SUBREG_REG
                                                      (XEXP (XEXP (x, 0), 0))))
                             == 'o')))
            return &XEXP (XEXP (x, 0), 0);
        }
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
         ZERO_EXTRACT, the most likely reason why this doesn't match is that
         we need to put the operand into a register.  So split at that
         point.  */

      if (SET_DEST (x) == cc0_rtx
          && GET_CODE (SET_SRC (x)) != COMPARE
          && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
          && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
          && ! (GET_CODE (SET_SRC (x)) == SUBREG
                && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
        return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
        return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
        return split;

      /* See if this is a bitfield assignment with everything constant.  If
         so, this is an IOR of an AND, so split it into that.  */
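      /* E.g. (illustrative, with BITS_BIG_ENDIAN zero) storing (const_int 3)
         into a 2-bit field at bit 4 of D becomes
         (set D (ior D (const_int 48)))
         since the constant fills the field; a constant that does not fill
         the field first masks D with an AND.  */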
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
          && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
          && GET_CODE (SET_SRC (x)) == CONST_INT
          && ((INTVAL (XEXP (SET_DEST (x), 1))
               + INTVAL (XEXP (SET_DEST (x), 2)))
              <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
          && ! side_effects_p (XEXP (SET_DEST (x), 0)))
        {
          int pos = INTVAL (XEXP (SET_DEST (x), 2));
          int len = INTVAL (XEXP (SET_DEST (x), 1));
          int src = INTVAL (SET_SRC (x));
          rtx dest = XEXP (SET_DEST (x), 0);
          enum machine_mode mode = GET_MODE (dest);
          unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

          if (BITS_BIG_ENDIAN)
            pos = GET_MODE_BITSIZE (mode) - len - pos;

          if ((unsigned HOST_WIDE_INT) src == mask)
            SUBST (SET_SRC (x),
                   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
          else
            SUBST (SET_SRC (x),
                   gen_binary (IOR, mode,
                               gen_binary (AND, mode, dest,
                                           GEN_INT (~ (mask << pos)
                                                    & GET_MODE_MASK (mode))),
                               GEN_INT (src << pos)));

          SUBST (SET_DEST (x), dest);

          split = find_split_point (&SET_SRC (x), insn);
          if (split && split != &SET_SRC (x))
            return split;
        }

      /* Otherwise, see if this is an operation that we can split into two.
         If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
        {
        case AND:
          /* If we are AND'ing with a large constant that is only a single
             bit and the result is only being used in a context where we
             need to know if it is zero or non-zero, replace it with a bit
             extraction.  This will avoid the large constant, which might
             have taken more than one insn to make.  If the constant were
             not a valid argument to the AND but took only one insn to make,
             this is no worse, but if it took more than one insn, it will
             be better.  */
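          /* E.g. (illustrative) if (set D (and X (const_int 4096))) feeds
             only a comparison of D with zero, the AND can be replaced by a
             single-bit extraction, conceptually
             (set D (zero_extract X (const_int 1) (const_int 12)));
             the exact rtl produced by make_extraction varies.  */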

          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
              && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
              && GET_CODE (SET_DEST (x)) == REG
              && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
              && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
              && XEXP (*split, 0) == SET_DEST (x)
              && XEXP (*split, 1) == const0_rtx)
            {
              rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
                                                XEXP (SET_SRC (x), 0),
                                                pos, NULL_RTX, 1, 1, 0, 0);
              if (extraction != 0)
                {
                  SUBST (SET_SRC (x), extraction);
                  return find_split_point (loc, insn);
                }
            }
          break;

        case NE:
          /* If STORE_FLAG_VALUE is -1 and this is (NE X 0) where only one
             bit of X is known to be on, this can be converted into a NEG
             of a shift.  */
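          /* E.g. if only bit 3 of X can be nonzero, (ne X (const_int 0))
             becomes (neg (lshiftrt X (const_int 3))), yielding the
             required -1 or 0.  */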
          if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
              && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
              && 1 <= (pos = exact_log2
                       (nonzero_bits (XEXP (SET_SRC (x), 0),
                                      GET_MODE (XEXP (SET_SRC (x), 0))))))
            {
              enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

              SUBST (SET_SRC (x),
                     gen_rtx_combine (NEG, mode,
                                      gen_rtx_combine (LSHIFTRT, mode,
                                                       XEXP (SET_SRC (x), 0),
                                                       GEN_INT (pos))));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
          break;

        case SIGN_EXTEND:
          inner = XEXP (SET_SRC (x), 0);

          /* We can't optimize if either mode is a partial integer
             mode as we don't know how many bits are significant
             in those modes.  */
          if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
              || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
            break;

          pos = 0;
          len = GET_MODE_BITSIZE (GET_MODE (inner));
          unsignedp = 0;
          break;

        case SIGN_EXTRACT:
        case ZERO_EXTRACT:
          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
            {
              inner = XEXP (SET_SRC (x), 0);
              len = INTVAL (XEXP (SET_SRC (x), 1));
              pos = INTVAL (XEXP (SET_SRC (x), 2));

              if (BITS_BIG_ENDIAN)
                pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
              unsignedp = (code == ZERO_EXTRACT);
            }
          break;

        default:
          break;
        }

      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
        {
          enum machine_mode mode = GET_MODE (SET_SRC (x));

          /* For unsigned, we have a choice of a shift followed by an
             AND or two shifts.  Use two shifts for field sizes where the
             constant might be too large.  We assume here that we can
             always at least get 8-bit constants in an AND insn, which is
             true for every current RISC.  */
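          /* E.g. an unsigned 8-bit field at bit 4 becomes
             (and (lshiftrt X (const_int 4)) (const_int 255)); wider or
             signed fields use an ASHIFT followed by the appropriate
             right shift instead.  */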
2891
2892 if (unsignedp && len <= 8)
230d793d
RS
2893 {
2894 SUBST (SET_SRC (x),
2895 gen_rtx_combine
2896 (AND, mode,
2897 gen_rtx_combine (LSHIFTRT, mode,
2898 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
2899 GEN_INT (pos)),
2900 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2901
2902 split = find_split_point (&SET_SRC (x), insn);
2903 if (split && split != &SET_SRC (x))
2904 return split;
2905 }
2906 else
2907 {
2908 SUBST (SET_SRC (x),
2909 gen_rtx_combine
2910 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2911 gen_rtx_combine (ASHIFT, mode,
2912 gen_lowpart_for_combine (mode, inner),
2913 GEN_INT (GET_MODE_BITSIZE (mode)
2914 - len - pos)),
2915 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2916
2917 split = find_split_point (&SET_SRC (x), insn);
2918 if (split && split != &SET_SRC (x))
2919 return split;
2920 }
2921 }
2922
2923 /* See if this is a simple operation with a constant as the second
2924 operand. It might be that this constant is out of range and hence
2925 could be used as a split point. */
2926 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2927 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2928 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2929 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2930 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2931 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2932 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2933 == 'o'))))
2934 return &XEXP (SET_SRC (x), 1);
2935
2936 /* Finally, see if this is a simple operation with its first operand
2937 not in a register. The operation might require this operand in a
2938 register, so return it as a split point. We can always do this
2939 because if the first operand were another operation, we would have
2940 already found it as a split point. */
2941 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2942 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2943 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2944 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2945 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2946 return &XEXP (SET_SRC (x), 0);
2947
2948 return 0;
2949
2950 case AND:
2951 case IOR:
2952 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2953 it is better to write this as (not (ior A B)) so we can split it.
2954 Similarly for IOR. */
2955 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2956 {
2957 SUBST (*loc,
2958 gen_rtx_combine (NOT, GET_MODE (x),
2959 gen_rtx_combine (code == IOR ? AND : IOR,
2960 GET_MODE (x),
2961 XEXP (XEXP (x, 0), 0),
2962 XEXP (XEXP (x, 1), 0))));
2963 return find_split_point (loc, insn);
2964 }
2965
2966 /* Many RISC machines have a large set of logical insns. If the
2967 second operand is a NOT, put it first so we will try to split the
2968 other operand first. */
2969 if (GET_CODE (XEXP (x, 1)) == NOT)
2970 {
2971 rtx tem = XEXP (x, 0);
2972 SUBST (XEXP (x, 0), XEXP (x, 1));
2973 SUBST (XEXP (x, 1), tem);
2974 }
2975 break;
2976
2977 default:
2978 break;
2979 }
2980
2981 /* Otherwise, select our actions depending on our rtx class. */
2982 switch (GET_RTX_CLASS (code))
2983 {
2984 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2985 case '3':
2986 split = find_split_point (&XEXP (x, 2), insn);
2987 if (split)
2988 return split;
2989 /* ... fall through ... */
2990 case '2':
2991 case 'c':
2992 case '<':
2993 split = find_split_point (&XEXP (x, 1), insn);
2994 if (split)
2995 return split;
2996 /* ... fall through ... */
2997 case '1':
2998 /* Some machines have (and (shift ...) ...) insns. If X is not
2999 an AND, but XEXP (X, 0) is, use it as our split point. */
3000 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3001 return &XEXP (x, 0);
3002
3003 split = find_split_point (&XEXP (x, 0), insn);
3004 if (split)
3005 return split;
3006 return loc;
3007 }
3008
3009 /* Otherwise, we don't have a split point. */
3010 return 0;
3011}
3012\f
3013/* Throughout X, replace FROM with TO, and return the result.
3014 The result is TO if X is FROM;
3015 otherwise the result is X, but its contents may have been modified.
3016 If they were modified, a record was made in undobuf so that
3017 undo_all will (among other things) return X to its original state.
3018
3019 If the number of changes necessary is too much to record to undo,
3020 the excess changes are not made, so the result is invalid.
3021 The changes already made can still be undone.
3022 undobuf.num_undo is incremented for such changes, so by testing that,
3023 the caller can tell whether the result is valid.
3024
3025 `n_occurrences' is incremented each time FROM is replaced.
3026
3027 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3028
3029 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
3030 by copying if `n_occurrences' is non-zero. */
3031
3032static rtx
3033subst (x, from, to, in_dest, unique_copy)
3034 register rtx x, from, to;
3035 int in_dest;
3036 int unique_copy;
3037{
3038 register enum rtx_code code = GET_CODE (x);
3039 enum machine_mode op0_mode = VOIDmode;
3040 register const char *fmt;
3041 register int len, i;
3042 rtx new;
3043
3044/* Two expressions are equal if they are identical copies of a shared
3045 RTX or if they are both registers with the same register number
3046 and mode. */
3047
3048#define COMBINE_RTX_EQUAL_P(X,Y) \
3049 ((X) == (Y) \
3050 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
3051 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3052
3053 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3054 {
3055 n_occurrences++;
3056 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3057 }
3058
3059 /* If X and FROM are the same register but different modes, they will
3060 not have been seen as equal above. However, flow.c will make a
3061 LOG_LINKS entry for that case. If we do nothing, we will try to
3062 rerecognize our original insn and, when it succeeds, we will
3063 delete the feeding insn, which is incorrect.
3064
3065 So force this insn not to match in this (rare) case. */
3066 if (! in_dest && code == REG && GET_CODE (from) == REG
3067 && REGNO (x) == REGNO (from))
3068 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3069
3070 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3071 of which may contain things that can be combined. */
3072 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3073 return x;
3074
3075 /* It is possible to have a subexpression appear twice in the insn.
3076 Suppose that FROM is a register that appears within TO.
3077 Then, after that subexpression has been scanned once by `subst',
3078 the second time it is scanned, TO may be found. If we were
3079 to scan TO here, we would find FROM within it and create a
3080 self-referent rtl structure which is completely wrong. */
3081 if (COMBINE_RTX_EQUAL_P (x, to))
3082 return to;
3083
3084 /* Parallel asm_operands need special attention because all of the
3085 inputs are shared across the arms. Furthermore, unsharing the
3086 rtl results in recognition failures. Failure to handle this case
3087 specially can result in circular rtl.
3088
3089 Solve this by doing a normal pass across the first entry of the
3090 parallel, and only processing the SET_DESTs of the subsequent
3091 entries. Ug. */
3092
3093 if (code == PARALLEL
3094 && GET_CODE (XVECEXP (x, 0, 0)) == SET
3095 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3096 {
3097 new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3098
3099 /* If this substitution failed, this whole thing fails. */
3100 if (GET_CODE (new) == CLOBBER
3101 && XEXP (new, 0) == const0_rtx)
3102 return new;
3103
3104 SUBST (XVECEXP (x, 0, 0), new);
3105
3106 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
230d793d 3107 {
4f4b3679
RH
3108 rtx dest = SET_DEST (XVECEXP (x, 0, i));
3109
3110 if (GET_CODE (dest) != REG
3111 && GET_CODE (dest) != CC0
3112 && GET_CODE (dest) != PC)
3113 {
3114 new = subst (dest, from, to, 0, unique_copy);
3115
3116 /* If this substitution failed, this whole thing fails. */
3117 if (GET_CODE (new) == CLOBBER
3118 && XEXP (new, 0) == const0_rtx)
3119 return new;
3120
3121 SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3122 }
3123 }
3124 }
3125 else
3126 {
3127 len = GET_RTX_LENGTH (code);
3128 fmt = GET_RTX_FORMAT (code);
3129
3130 /* We don't need to process a SET_DEST that is a register, CC0,
3131 or PC, so set up to skip this common case. All other cases
3132 where we want to suppress replacing something inside a
3133 SET_SRC are handled via the IN_DEST operand. */
3134 if (code == SET
3135 && (GET_CODE (SET_DEST (x)) == REG
3136 || GET_CODE (SET_DEST (x)) == CC0
3137 || GET_CODE (SET_DEST (x)) == PC))
3138 fmt = "ie";
3139
3140 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3141 constant. */
3142 if (fmt[0] == 'e')
3143 op0_mode = GET_MODE (XEXP (x, 0));
3144
3145 for (i = 0; i < len; i++)
3146 {
3147 if (fmt[i] == 'E')
3148 {
3149 register int j;
3150 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3151 {
3152 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3153 {
3154 new = (unique_copy && n_occurrences
3155 ? copy_rtx (to) : to);
3156 n_occurrences++;
3157 }
3158 else
3159 {
3160 new = subst (XVECEXP (x, i, j), from, to, 0,
3161 unique_copy);
3162
3163 /* If this substitution failed, this whole thing
3164 fails. */
3165 if (GET_CODE (new) == CLOBBER
3166 && XEXP (new, 0) == const0_rtx)
3167 return new;
3168 }
3169
3170 SUBST (XVECEXP (x, i, j), new);
3171 }
3172 }
3173 else if (fmt[i] == 'e')
3174 {
3175 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3176 {
3177 /* In general, don't install a subreg involving two
3178 modes not tieable. It can worsen register
3179 allocation, and can even make invalid reload
3180 insns, since the reg inside may need to be copied
3181 from in the outside mode, and that may be invalid
3182 if it is an fp reg copied in integer mode.
3183
3184 We allow two exceptions to this: It is valid if
3185 it is inside another SUBREG and the mode of that
3186 SUBREG and the mode of the inside of TO is
3187 tieable and it is valid if X is a SET that copies
3188 FROM to CC0. */
3189
3190 if (GET_CODE (to) == SUBREG
3191 && ! MODES_TIEABLE_P (GET_MODE (to),
3192 GET_MODE (SUBREG_REG (to)))
3193 && ! (code == SUBREG
3194 && MODES_TIEABLE_P (GET_MODE (x),
3195 GET_MODE (SUBREG_REG (to))))
3196#ifdef HAVE_cc0
3197 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3198#endif
3199 )
3200 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3201
3202 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3203 n_occurrences++;
3204 }
3205 else
3206 /* If we are in a SET_DEST, suppress most cases unless we
3207 have gone inside a MEM, in which case we want to
3208 simplify the address. We assume here that things that
3209 are actually part of the destination have their inner
3210 parts in the first expression. This is true for SUBREG,
3211 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3212 things aside from REG and MEM that should appear in a
3213 SET_DEST. */
3214 new = subst (XEXP (x, i), from, to,
3215 (((in_dest
3216 && (code == SUBREG || code == STRICT_LOW_PART
3217 || code == ZERO_EXTRACT))
3218 || code == SET)
3219 && i == 0), unique_copy);
3220
3221 /* If we found that we will have to reject this combination,
3222 indicate that by returning the CLOBBER ourselves, rather than
3223 an expression containing it. This will speed things up as
3224 well as prevent accidents where two CLOBBERs are considered
3225 to be equal, thus producing an incorrect simplification. */
3226
3227 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3228 return new;
3229
3230 SUBST (XEXP (x, i), new);
3231 }
3232 }
3233 }
3234
3235 /* Try to simplify X. If the simplification changed the code, it is likely
3236 that further simplification will help, so loop, but limit the number
3237 of repetitions that will be performed. */
3238
3239 for (i = 0; i < 4; i++)
3240 {
3241 /* If X is sufficiently simple, don't bother trying to do anything
3242 with it. */
3243 if (code != CONST_INT && code != REG && code != CLOBBER)
3244 x = simplify_rtx (x, op0_mode, i == 3, in_dest);
3245
3246 if (GET_CODE (x) == code)
3247 break;
3248
3249 code = GET_CODE (x);
3250
3251 /* We no longer know the original mode of operand 0 since we
3252 have changed the form of X. */
3253 op0_mode = VOIDmode;
3254 }
3255
3256 return x;
3257}
3258\f
3259/* Simplify X, a piece of RTL. We just operate on the expression at the
3260 outer level; call `subst' to simplify recursively. Return the new
3261 expression.
3262
3263 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3264 will be the final iteration even if an expression with a code different from
3265 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
3266
3267static rtx
3268simplify_rtx (x, op0_mode, last, in_dest)
3269 rtx x;
3270 enum machine_mode op0_mode;
3271 int last;
3272 int in_dest;
3273{
3274 enum rtx_code code = GET_CODE (x);
3275 enum machine_mode mode = GET_MODE (x);
3276 rtx temp;
3277 int i;
3278
3279 /* If this is a commutative operation, put a constant last and a complex
3280 expression first. We don't need to do this for comparisons here. */
3281 if (GET_RTX_CLASS (code) == 'c'
3282 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3283 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3284 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3285 || (GET_CODE (XEXP (x, 0)) == SUBREG
3286 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3287 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3288 {
3289 temp = XEXP (x, 0);
3290 SUBST (XEXP (x, 0), XEXP (x, 1));
3291 SUBST (XEXP (x, 1), temp);
3292 }
3293
3294 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3295 sign extension of a PLUS with a constant, reverse the order of the sign
3296 extension and the addition. Note that this is not the same as the original
3297 code, but overflow is undefined for signed values. Also note that the
3298 PLUS will have been partially moved "inside" the sign-extension, so that
3299 the first operand of X will really look like:
3300 (ashiftrt (plus (ashift A C4) C5) C4).
3301 We convert this to
3302 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
3303 and replace the first operand of X with that expression. Later parts
3304 of this function may simplify the expression further.
3305
3306 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3307 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3308 distributive law to produce (plus (mult (sign_extend A) C2) C3).
3309
3310 We do this to simplify address expressions. */
3311
3312 if ((code == PLUS || code == MINUS || code == MULT)
3313 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3314 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3315 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3316 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3317 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3318 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3319 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3320 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3321 XEXP (XEXP (XEXP (x, 0), 0), 1),
3322 XEXP (XEXP (x, 0), 1))) != 0)
3323 {
3324 rtx new
3325 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3326 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3327 INTVAL (XEXP (XEXP (x, 0), 1)));
3328
3329 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3330 INTVAL (XEXP (XEXP (x, 0), 1)));
3331
3332 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3333 }
3334
3335 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3336 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3337 things. Check for cases where both arms are testing the same
3338 condition.
3339
3340 Don't do anything if all operands are very simple. */
3341
3342 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3343 || GET_RTX_CLASS (code) == '<')
3344 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3345 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3346 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3347 == 'o')))
3348 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3349 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3350 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3351 == 'o')))))
3352 || (GET_RTX_CLASS (code) == '1'
3353 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3354 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3355 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3356 == 'o'))))))
3357 {
3358 rtx cond, true, false;
3359
3360 cond = if_then_else_cond (x, &true, &false);
3361 if (cond != 0
3362 /* If everything is a comparison, what we have is highly unlikely
3363 to be simpler, so don't use it. */
3364 && ! (GET_RTX_CLASS (code) == '<'
3365 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3366 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
3367 {
3368 rtx cop1 = const0_rtx;
3369 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3370
3371 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3372 return x;
3373
3374 /* Simplify the alternative arms; this may collapse the true and
3375 false arms to store-flag values. */
3376 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3377 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3378
3379 /* Restarting if we generate a store-flag expression will cause
3380 us to loop. Just drop through in this case. */
3381
3382 /* If the result values are STORE_FLAG_VALUE and zero, we can
3383 just make the comparison operation. */
3384 if (true == const_true_rtx && false == const0_rtx)
3385 x = gen_binary (cond_code, mode, cond, cop1);
3386 else if (true == const0_rtx && false == const_true_rtx)
3387 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3388
3389 /* Likewise, we can make the negate of a comparison operation
3390 if the result values are - STORE_FLAG_VALUE and zero. */
3391 else if (GET_CODE (true) == CONST_INT
3392 && INTVAL (true) == - STORE_FLAG_VALUE
3393 && false == const0_rtx)
3394 x = gen_unary (NEG, mode, mode,
3395 gen_binary (cond_code, mode, cond, cop1));
3396 else if (GET_CODE (false) == CONST_INT
3397 && INTVAL (false) == - STORE_FLAG_VALUE
3398 && true == const0_rtx)
3399 x = gen_unary (NEG, mode, mode,
3400 gen_binary (reverse_condition (cond_code),
3401 mode, cond, cop1));
3402 else
3403 return gen_rtx_IF_THEN_ELSE (mode,
3404 gen_binary (cond_code, VOIDmode,
3405 cond, cop1),
3406 true, false);
3407
3408 code = GET_CODE (x);
3409 op0_mode = VOIDmode;
3410 }
3411 }
3412
3413 /* Try to fold this expression in case we have constants that weren't
3414 present before. */
3415 temp = 0;
3416 switch (GET_RTX_CLASS (code))
3417 {
3418 case '1':
3419 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3420 break;
3421 case '<':
3422 temp = simplify_relational_operation (code, op0_mode,
3423 XEXP (x, 0), XEXP (x, 1));
3424#ifdef FLOAT_STORE_FLAG_VALUE
3425 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3426 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3427 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3428#endif
3429 break;
3430 case 'c':
3431 case '2':
3432 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3433 break;
3434 case 'b':
3435 case '3':
3436 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3437 XEXP (x, 1), XEXP (x, 2));
3438 break;
3439 }
3440
3441 if (temp)
3442 x = temp, code = GET_CODE (temp);
3443
3444 /* First see if we can apply the inverse distributive law. */
3445 if (code == PLUS || code == MINUS
3446 || code == AND || code == IOR || code == XOR)
3447 {
3448 x = apply_distributive_law (x);
3449 code = GET_CODE (x);
3450 }
3451
3452 /* If CODE is an associative operation not otherwise handled, see if we
3453 can associate some operands. This can win if they are constants or
3454 if they are logically related (i.e., (a & b) & a). */
3455 if ((code == PLUS || code == MINUS
3456 || code == MULT || code == AND || code == IOR || code == XOR
3457 || code == DIV || code == UDIV
3458 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3459 && INTEGRAL_MODE_P (mode))
3460 {
3461 if (GET_CODE (XEXP (x, 0)) == code)
3462 {
3463 rtx other = XEXP (XEXP (x, 0), 0);
3464 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3465 rtx inner_op1 = XEXP (x, 1);
3466 rtx inner;
3467
3468 /* Make sure we pass the constant operand if any as the second
3469 one if this is a commutative operation. */
3470 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3471 {
3472 rtx tem = inner_op0;
3473 inner_op0 = inner_op1;
3474 inner_op1 = tem;
3475 }
3476 inner = simplify_binary_operation (code == MINUS ? PLUS
3477 : code == DIV ? MULT
3478 : code == UDIV ? MULT
3479 : code,
3480 mode, inner_op0, inner_op1);
3481
3482 /* For commutative operations, try the other pair if that one
3483 didn't simplify. */
3484 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3485 {
3486 other = XEXP (XEXP (x, 0), 1);
3487 inner = simplify_binary_operation (code, mode,
3488 XEXP (XEXP (x, 0), 0),
3489 XEXP (x, 1));
3490 }
3491
3492 if (inner)
3493 return gen_binary (code, mode, other, inner);
3494 }
3495 }
3496
3497 /* A little bit of algebraic simplification here. */
3498 switch (code)
3499 {
3500 case MEM:
3501 /* Ensure that our address has any ASHIFTs converted to MULT in case
3502 address-recognizing predicates are called later. */
3503 temp = make_compound_operation (XEXP (x, 0), MEM);
3504 SUBST (XEXP (x, 0), temp);
3505 break;
3506
3507 case SUBREG:
3508 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3509 is paradoxical. If we can't do that safely, then it becomes
3510 something nonsensical so that this combination won't take place. */
3511
3512 if (GET_CODE (SUBREG_REG (x)) == MEM
3513 && (GET_MODE_SIZE (mode)
3514 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3515 {
3516 rtx inner = SUBREG_REG (x);
3517 int endian_offset = 0;
3518 /* Don't change the mode of the MEM
3519 if that would change the meaning of the address. */
3520 if (MEM_VOLATILE_P (SUBREG_REG (x))
3521 || mode_dependent_address_p (XEXP (inner, 0)))
3522 return gen_rtx_CLOBBER (mode, const0_rtx);
3523
3524 if (BYTES_BIG_ENDIAN)
3525 {
3526 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3527 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3528 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3529 endian_offset -= (UNITS_PER_WORD
3530 - GET_MODE_SIZE (GET_MODE (inner)));
3531 }
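 /* For instance, narrowing (subreg:QI (mem:SI X) 0) on a big-endian
 target with 4-byte words needs ENDIAN_OFFSET == 3, since the low-order
 byte that the subreg names lives at address X + 3. */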
3532 /* Note if the plus_constant doesn't make a valid address
3533 then this combination won't be accepted. */
3534 x = gen_rtx_MEM (mode,
3535 plus_constant (XEXP (inner, 0),
3536 (SUBREG_WORD (x) * UNITS_PER_WORD
3537 + endian_offset)));
3538 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3539 MEM_COPY_ATTRIBUTES (x, inner);
3540 return x;
3541 }
3542
3543 /* If we are in a SET_DEST, these other cases can't apply. */
3544 if (in_dest)
3545 return x;
3546
3547 /* Changing mode twice with SUBREG => just change it once,
3548 or not at all if changing back to starting mode. */
3549 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3550 {
3551 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3552 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3553 return SUBREG_REG (SUBREG_REG (x));
3554
3555 SUBST_INT (SUBREG_WORD (x),
3556 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3557 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3558 }
3559
3560 /* SUBREG of a hard register => just change the register number
3561 and/or mode. If the hard register is not valid in that mode,
3562 suppress this combination. If the hard register is the stack,
3563 frame, or argument pointer, leave this as a SUBREG. */
3564
3565 if (GET_CODE (SUBREG_REG (x)) == REG
3566 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3567 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3568#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3569 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3570#endif
3571#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3572 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3573#endif
3574 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3575 {
3576 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3577 mode))
3578 return gen_rtx_REG (mode,
3579 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3580 else
3581 return gen_rtx_CLOBBER (mode, const0_rtx);
3582 }
3583
3584 /* For a constant, try to pick up the part we want. Handle a full
3585 word and low-order part. Only do this if we are narrowing
3586 the constant; if it is being widened, we have no idea what
3587 the extra bits will have been set to. */
3588
3589 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3590 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3591 && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
3592 && GET_MODE_CLASS (mode) == MODE_INT)
3593 {
3594 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3595 0, op0_mode);
3596 if (temp)
3597 return temp;
3598 }
3599
3600 /* If we want a subreg of a constant, at offset 0,
3601 take the low bits. On a little-endian machine, that's
3602 always valid. On a big-endian machine, it's valid
3603 only if the constant's mode fits in one word. Note that we
3604 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode. */
3605 if (CONSTANT_P (SUBREG_REG (x))
3606 && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3607 || ! WORDS_BIG_ENDIAN)
3608 ? SUBREG_WORD (x) == 0
3609 : (SUBREG_WORD (x)
3610 == ((GET_MODE_SIZE (op0_mode)
3611 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3612 / UNITS_PER_WORD)))
3613 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
3614 && (! WORDS_BIG_ENDIAN
3615 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3616 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3617
3618 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3619 since we are saying that the high bits don't matter. */
3620 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3621 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3622 return SUBREG_REG (x);
3623
3624 /* Note that we cannot do any narrowing for non-constants since
3625 we might have been counting on using the fact that some bits were
3626 zero. We now do this in the SET. */
3627
3628 break;
3629
3630 case NOT:
3631 /* (not (plus X -1)) can become (neg X). */
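 /* In two's complement, ~Y == -Y - 1, so ~(X - 1) == -X; the same
 identity gives ~(-X) == X - 1 for the case below. */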
3632 if (GET_CODE (XEXP (x, 0)) == PLUS
3633 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3634 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3635
3636 /* Similarly, (not (neg X)) is (plus X -1). */
3637 if (GET_CODE (XEXP (x, 0)) == NEG)
3638 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3639 constm1_rtx);
3640
3641 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3642 if (GET_CODE (XEXP (x, 0)) == XOR
3643 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3644 && (temp = simplify_unary_operation (NOT, mode,
3645 XEXP (XEXP (x, 0), 1),
3646 mode)) != 0)
3647 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3648
3649 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3650 other than 1, but that is not valid. We could do a similar
3651 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3652 but this doesn't seem common enough to bother with. */
3653 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3654 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3655 return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
3656 XEXP (XEXP (x, 0), 1));
3657
3658 if (GET_CODE (XEXP (x, 0)) == SUBREG
3659 && subreg_lowpart_p (XEXP (x, 0))
3660 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3661 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3662 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3663 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3664 {
3665 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3666
3667 x = gen_rtx_ROTATE (inner_mode,
3668 gen_unary (NOT, inner_mode, inner_mode,
3669 const1_rtx),
3670 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3671 return gen_lowpart_for_combine (mode, x);
3672 }
3673
3674 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3675 reversing the comparison code if valid. */
3676 if (STORE_FLAG_VALUE == -1
3677 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3678 && reversible_comparison_p (XEXP (x, 0)))
3679 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3680 mode, XEXP (XEXP (x, 0), 0),
3681 XEXP (XEXP (x, 0), 1));
3682
3683 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3684 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3685 perform the above simplification. */
3686
3687 if (STORE_FLAG_VALUE == -1
3688 && XEXP (x, 1) == const1_rtx
3689 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3690 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3691 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3692 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3693
3694 /* Apply De Morgan's laws to reduce number of patterns for machines
3695 with negating logical insns (and-not, nand, etc.). If result has
3696 only one NOT, put it first, since that is how the patterns are
3697 coded. */
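 /* E.g., (not (ior A (not B))) becomes (and (not A) B), which matches
 an and-not pattern directly. */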
3698
3699 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3700 {
3701 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3702
3703 if (GET_CODE (in1) == NOT)
3704 in1 = XEXP (in1, 0);
3705 else
3706 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3707
3708 if (GET_CODE (in2) == NOT)
3709 in2 = XEXP (in2, 0);
3710 else if (GET_CODE (in2) == CONST_INT
3711 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3712 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3713 else
3714 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3715
3716 if (GET_CODE (in2) == NOT)
3717 {
3718 rtx tem = in2;
3719 in2 = in1; in1 = tem;
3720 }
3721
3722 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3723 mode, in1, in2);
3724 }
3725 break;
3726
3727 case NEG:
3728 /* (neg (plus X 1)) can become (not X). */
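 /* Since -Y == ~Y + 1 in two's complement, -(X + 1) == ~X, and
 likewise -(~X) == X + 1 for the case below. */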
3729 if (GET_CODE (XEXP (x, 0)) == PLUS
3730 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3731 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3732
3733 /* Similarly, (neg (not X)) is (plus X 1). */
3734 if (GET_CODE (XEXP (x, 0)) == NOT)
3735 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3736
3737 /* (neg (minus X Y)) can become (minus Y X). */
3738 if (GET_CODE (XEXP (x, 0)) == MINUS
3739 && (! FLOAT_MODE_P (mode)
3740 /* x-y != -(y-x) with IEEE floating point. */
3741 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3742 || flag_fast_math))
3743 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3744 XEXP (XEXP (x, 0), 0));
3745
3746 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3747 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3748 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3749 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3750
3751 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3752 if we can then eliminate the NEG (e.g.,
3753 if the operand is a constant). */
3754
3755 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3756 {
3757 temp = simplify_unary_operation (NEG, mode,
3758 XEXP (XEXP (x, 0), 0), mode);
3759 if (temp)
3760 {
3761 SUBST (XEXP (XEXP (x, 0), 0), temp);
3762 return XEXP (x, 0);
3763 }
3764 }
3765
3766 temp = expand_compound_operation (XEXP (x, 0));
3767
3768 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3769 replaced by (lshiftrt X C). This will convert
3770 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3771
3772 if (GET_CODE (temp) == ASHIFTRT
3773 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3774 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3775 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3776 INTVAL (XEXP (temp, 1)));
3777
3778 /* If X has only a single bit that might be nonzero, say, bit I, convert
3779 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3780 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3781 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3782 or a SUBREG of one since we'd be making the expression more
3783 complex if it was just a register. */
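 /* For instance, in a 32-bit mode where only bit 0 of TEMP can be
 nonzero, (neg TEMP) becomes (ashiftrt (ashift TEMP 31) 31), which is
 0 when TEMP is 0 and -1 when TEMP is 1. */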
3784
3785 if (GET_CODE (temp) != REG
3786 && ! (GET_CODE (temp) == SUBREG
3787 && GET_CODE (SUBREG_REG (temp)) == REG)
3788 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3789 {
3790 rtx temp1 = simplify_shift_const
3791 (NULL_RTX, ASHIFTRT, mode,
3792 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3793 GET_MODE_BITSIZE (mode) - 1 - i),
3794 GET_MODE_BITSIZE (mode) - 1 - i);
3795
3796 /* If all we did was surround TEMP with the two shifts, we
3797 haven't improved anything, so don't use it. Otherwise,
3798 we are better off with TEMP1. */
3799 if (GET_CODE (temp1) != ASHIFTRT
3800 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3801 || XEXP (XEXP (temp1, 0), 0) != temp)
3802 return temp1;
3803 }
3804 break;
3805
3806 case TRUNCATE:
3807 /* We can't handle truncation to a partial integer mode here
3808 because we don't know the real bitsize of the partial
3809 integer mode. */
3810 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3811 break;
3812
3813 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3814 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3815 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3816 SUBST (XEXP (x, 0),
3817 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3818 GET_MODE_MASK (mode), NULL_RTX, 0));
3819
3820 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3821 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3822 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3823 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3824 return XEXP (XEXP (x, 0), 0);
3825
3826 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3827 (OP:SI foo:SI) if OP is NEG or ABS. */
3828 if ((GET_CODE (XEXP (x, 0)) == ABS
3829 || GET_CODE (XEXP (x, 0)) == NEG)
3830 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3831 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3832 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3833 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3834 XEXP (XEXP (XEXP (x, 0), 0), 0));
3835
3836 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3837 (truncate:SI x). */
3838 if (GET_CODE (XEXP (x, 0)) == SUBREG
3839 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3840 && subreg_lowpart_p (XEXP (x, 0)))
3841 return SUBREG_REG (XEXP (x, 0));
3842
3843 /* If we know that the value is already truncated, we can
3844 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION is
3845 nonzero for the corresponding modes. */
3846 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3847 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
3848 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3849 >= GET_MODE_BITSIZE (mode) + 1)
3850 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3851
3852 /* A truncate of a comparison can be replaced with a subreg if
3853 STORE_FLAG_VALUE permits. This is like the previous test,
3854 but it works even if the comparison is done in a mode larger
3855 than HOST_BITS_PER_WIDE_INT. */
3856 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3857 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3858 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
3859 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3860
3861 /* Similarly, a truncate of a register whose value is a
3862 comparison can be replaced with a subreg if STORE_FLAG_VALUE
3863 permits. */
3864 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3865 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
3866 && (temp = get_last_value (XEXP (x, 0)))
3867 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
3868 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3869
3870 break;
3871
3872 case FLOAT_TRUNCATE:
3873 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3874 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3875 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3876 return XEXP (XEXP (x, 0), 0);
3877
3878 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3879 (OP:SF foo:SF) if OP is NEG or ABS. */
3880 if ((GET_CODE (XEXP (x, 0)) == ABS
3881 || GET_CODE (XEXP (x, 0)) == NEG)
3882 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3883 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3884 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3885 XEXP (XEXP (XEXP (x, 0), 0), 0));
3886
3887 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3888 is (float_truncate:SF x). */
3889 if (GET_CODE (XEXP (x, 0)) == SUBREG
3890 && subreg_lowpart_p (XEXP (x, 0))
3891 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3892 return SUBREG_REG (XEXP (x, 0));
3893 break;
3894
3895#ifdef HAVE_cc0
3896 case COMPARE:
3897 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3898 using cc0, in which case we want to leave it as a COMPARE
3899 so we can distinguish it from a register-register-copy. */
3900 if (XEXP (x, 1) == const0_rtx)
3901 return XEXP (x, 0);
3902
3903 /* In IEEE floating point, x-0 is not the same as x. */
3904 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3905 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3906 || flag_fast_math)
3907 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3908 return XEXP (x, 0);
3909 break;
3910#endif
3911
3912 case CONST:
3913 /* (const (const X)) can become (const X). Do it this way rather than
3914 returning the inner CONST since CONST can be shared with a
3915 REG_EQUAL note. */
3916 if (GET_CODE (XEXP (x, 0)) == CONST)
3917 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3918 break;
3919
3920#ifdef HAVE_lo_sum
3921 case LO_SUM:
3922 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3923 can add in an offset. find_split_point will split this address up
3924 again if it doesn't match. */
3925 if (GET_CODE (XEXP (x, 0)) == HIGH
3926 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3927 return XEXP (x, 1);
3928 break;
3929#endif
3930
3931 case PLUS:
3932 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3933 outermost. That's because that's the way indexed addresses are
3934 supposed to appear. This code used to check many more cases, but
3935 they are now checked elsewhere. */
3936 if (GET_CODE (XEXP (x, 0)) == PLUS
3937 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3938 return gen_binary (PLUS, mode,
3939 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3940 XEXP (x, 1)),
3941 XEXP (XEXP (x, 0), 1));
3942
3943 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3944 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3945 bit-field and can be replaced by either a sign_extend or a
3946 sign_extract. The `and' may be a zero_extend and the two
3947 <c>, -<c> constants may be reversed. */
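 /* A concrete case: ((X & 255) ^ 128) - 128 sign-extends the low byte
 of X, since values 0..127 are unchanged while 128..255 map to
 -128..-1. */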
3948 if (GET_CODE (XEXP (x, 0)) == XOR
3949 && GET_CODE (XEXP (x, 1)) == CONST_INT
3950 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3951 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3952 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3953 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
3954 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3955 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3956 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3957 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3958 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3959 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3960 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3961 == i + 1))))
3962 return simplify_shift_const
3963 (NULL_RTX, ASHIFTRT, mode,
3964 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3965 XEXP (XEXP (XEXP (x, 0), 0), 0),
3966 GET_MODE_BITSIZE (mode) - (i + 1)),
3967 GET_MODE_BITSIZE (mode) - (i + 1));
3968
3969 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3970 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3971 is 1. This produces better code than the alternative immediately
3972 below. */
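 /* E.g., with STORE_FLAG_VALUE == 1, (plus (ltu A B) -1) is 0 or -1,
 which is (neg (geu A B)). */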
3973 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3974 && reversible_comparison_p (XEXP (x, 0))
3975 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3976 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3977 return
3978 gen_unary (NEG, mode, mode,
3979 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3980 mode, XEXP (XEXP (x, 0), 0),
3981 XEXP (XEXP (x, 0), 1)));
3982
3983 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3984 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3985 the bitsize of the mode - 1. This allows simplification of
3986 "a = (b & 8) == 0;" */
3987 if (XEXP (x, 1) == constm1_rtx
3988 && GET_CODE (XEXP (x, 0)) != REG
3989 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3990 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3991 && nonzero_bits (XEXP (x, 0), mode) == 1)
3992 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3993 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3994 gen_rtx_combine (XOR, mode,
3995 XEXP (x, 0), const1_rtx),
3996 GET_MODE_BITSIZE (mode) - 1),
3997 GET_MODE_BITSIZE (mode) - 1);
3998
3999 /* If we are adding two things that have no bits in common, convert
4000 the addition into an IOR. This will often be further simplified,
4001 for example in cases like ((a & 1) + (a & 2)), which can
4002 become a & 3. */
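 /* Here (a & 1) and (a & 2) have disjoint nonzero bits, so the sum
 equals the inclusive OR, and (ior (and a 1) (and a 2)) then folds to
 (and a 3). */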
4003
4004 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4005 && (nonzero_bits (XEXP (x, 0), mode)
4006 & nonzero_bits (XEXP (x, 1), mode)) == 0)
4007 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4008 break;
4009
4010 case MINUS:
4011 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4012 by reversing the comparison code if valid. */
4013 if (STORE_FLAG_VALUE == 1
4014 && XEXP (x, 0) == const1_rtx
4015 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4016 && reversible_comparison_p (XEXP (x, 1)))
4017 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
4018 mode, XEXP (XEXP (x, 1), 0),
4019 XEXP (XEXP (x, 1), 1));
4020
4021 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4022 (and <foo> (const_int pow2-1)) */
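 /* E.g., X - (X & -8) leaves just the low three bits of X, which is
 (and X 7). */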
4023 if (GET_CODE (XEXP (x, 1)) == AND
4024 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4025 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4026 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4027 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4028 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4029
4030 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4031 integers. */
4032 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4033 return gen_binary (MINUS, mode,
4034 gen_binary (MINUS, mode, XEXP (x, 0),
4035 XEXP (XEXP (x, 1), 0)),
4036 XEXP (XEXP (x, 1), 1));
4037 break;
4038
4039 case MULT:
4040 /* If we have (mult (plus A B) C), apply the distributive law and then
4041 the inverse distributive law to see if things simplify. This
4042 occurs mostly in addresses, often when unrolling loops. */
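 /* E.g., (mult (plus A 4) 8) is first distributed to
 (plus (mult A 8) (mult 4 8)), whose constant half folds to 32; the
 inverse distributive law then keeps whichever form is simpler. */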
4043
4044 if (GET_CODE (XEXP (x, 0)) == PLUS)
4045 {
4046 x = apply_distributive_law
4047 (gen_binary (PLUS, mode,
4048 gen_binary (MULT, mode,
4049 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4050 gen_binary (MULT, mode,
4051 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4052
4053 if (GET_CODE (x) != MULT)
4054 return x;
4055 }
4056 break;
4057
4058 case UDIV:
4059 /* If this is a divide by a power of two, treat it as a shift if
4060 its first operand is a shift. */
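 /* E.g., (udiv (ashift X 3) 4) is rewritten as
 (lshiftrt (ashift X 3) 2), letting the shift simplifier merge the
 two shifts. */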
4061 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4062 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4063 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4064 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4065 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4066 || GET_CODE (XEXP (x, 0)) == ROTATE
4067 || GET_CODE (XEXP (x, 0)) == ROTATERT))
4068 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4069 break;
4070
4071 case EQ: case NE:
4072 case GT: case GTU: case GE: case GEU:
4073 case LT: case LTU: case LE: case LEU:
4074 /* If the first operand is a condition code, we can't do anything
4075 with it. */
4076 if (GET_CODE (XEXP (x, 0)) == COMPARE
4077 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4078#ifdef HAVE_cc0
4079 && XEXP (x, 0) != cc0_rtx
4080#endif
4081 ))
4082 {
4083 rtx op0 = XEXP (x, 0);
4084 rtx op1 = XEXP (x, 1);
4085 enum rtx_code new_code;
4086
4087 if (GET_CODE (op0) == COMPARE)
4088 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4089
4090 /* Simplify our comparison, if possible. */
4091 new_code = simplify_comparison (code, &op0, &op1);
4092
4093 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4094 if only the low-order bit is possibly nonzero in X (such as when
4095 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4096 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4097 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4098 (plus X 1).
4099
4100 Remove any ZERO_EXTRACT we made when thinking this was a
4101 comparison. It may now be simpler to use, e.g., an AND. If a
4102 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4103 the call to make_compound_operation in the SET case. */
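 /* E.g., with STORE_FLAG_VALUE == 1, (ne (zero_extract X 1 3) 0) is
 just (zero_extract X 1 3) itself, since that value is already 0
 or 1. */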
4104
4105 if (STORE_FLAG_VALUE == 1
4106 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4107 && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
4108 return gen_lowpart_for_combine (mode,
4109 expand_compound_operation (op0));
4110
4111 else if (STORE_FLAG_VALUE == 1
4112 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4113 && op1 == const0_rtx
4114 && (num_sign_bit_copies (op0, mode)
4115 == GET_MODE_BITSIZE (mode)))
4116 {
4117 op0 = expand_compound_operation (op0);
4118 return gen_unary (NEG, mode, mode,
4119 gen_lowpart_for_combine (mode, op0));
4120 }
4121
4122 else if (STORE_FLAG_VALUE == 1
4123 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4124 && op1 == const0_rtx
4125 && nonzero_bits (op0, mode) == 1)
4126 {
4127 op0 = expand_compound_operation (op0);
4128 return gen_binary (XOR, mode,
4129 gen_lowpart_for_combine (mode, op0),
4130 const1_rtx);
4131 }
4132
4133 else if (STORE_FLAG_VALUE == 1
4134 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4135 && op1 == const0_rtx
4136 && (num_sign_bit_copies (op0, mode)
4137 == GET_MODE_BITSIZE (mode)))
4138 {
4139 op0 = expand_compound_operation (op0);
4140 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4141 }
4142
4143 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4144 those above. */
4145 if (STORE_FLAG_VALUE == -1
4146 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4147 && op1 == const0_rtx
4148 && (num_sign_bit_copies (op0, mode)
4149 == GET_MODE_BITSIZE (mode)))
4150 return gen_lowpart_for_combine (mode,
4151 expand_compound_operation (op0));
4152
4153 else if (STORE_FLAG_VALUE == -1
4154 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4155 && op1 == const0_rtx
4156 && nonzero_bits (op0, mode) == 1)
4157 {
4158 op0 = expand_compound_operation (op0);
4159 return gen_unary (NEG, mode, mode,
4160 gen_lowpart_for_combine (mode, op0));
4161 }
4162
4163 else if (STORE_FLAG_VALUE == -1
4164 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4165 && op1 == const0_rtx
4166 && (num_sign_bit_copies (op0, mode)
4167 == GET_MODE_BITSIZE (mode)))
4168 {
4169 op0 = expand_compound_operation (op0);
4170 return gen_unary (NOT, mode, mode,
4171 gen_lowpart_for_combine (mode, op0));
4172 }
4173
4174 /* If X is 0/1, (eq X 0) is X-1. */
4175 else if (STORE_FLAG_VALUE == -1
4176 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4177 && op1 == const0_rtx
4178 && nonzero_bits (op0, mode) == 1)
4179 {
4180 op0 = expand_compound_operation (op0);
4181 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4182 }
4183
4184 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4185 one bit that might be nonzero, we can convert (ne x 0) to
4186 (ashift x c) where C puts the bit in the sign bit. Remove any
4187 AND with STORE_FLAG_VALUE when we are done, since we are only
4188 going to test the sign bit. */
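 /* E.g., on a 32-bit target where STORE_FLAG_VALUE is 0x80000000, if
 only bit 2 of X can be nonzero, (ne X 0) becomes (ashift X 29),
 placing that bit in the sign bit. */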
4189 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4190 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4191 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4192 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
4193 && op1 == const0_rtx
4194 && mode == GET_MODE (op0)
4195 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4196 {
4197 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4198 expand_compound_operation (op0),
4199 GET_MODE_BITSIZE (mode) - 1 - i);
4200 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4201 return XEXP (x, 0);
4202 else
4203 return x;
4204 }
4205
4206 /* If the code changed, return a whole new comparison. */
4207 if (new_code != code)
4208 return gen_rtx_combine (new_code, mode, op0, op1);
4209
4210 /* Otherwise, keep this operation, but maybe change its operands.
4211 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4212 SUBST (XEXP (x, 0), op0);
4213 SUBST (XEXP (x, 1), op1);
4214 }
4215 break;
4216
4217 case IF_THEN_ELSE:
4218 return simplify_if_then_else (x);
4219
4220 case ZERO_EXTRACT:
4221 case SIGN_EXTRACT:
4222 case ZERO_EXTEND:
4223 case SIGN_EXTEND:
4224 /* If we are processing SET_DEST, we are done. */
4225 if (in_dest)
4226 return x;
4227
4228 return expand_compound_operation (x);
4229
4230 case SET:
4231 return simplify_set (x);
4232
4233 case AND:
4234 case IOR:
4235 case XOR:
4236 return simplify_logical (x, last);
4237
4238 case ABS:
4239 /* (abs (neg <foo>)) -> (abs <foo>) */
4240 if (GET_CODE (XEXP (x, 0)) == NEG)
4241 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4242
4243 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4244 do nothing. */
4245 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4246 break;
4247
4248 /* If operand is something known to be positive, ignore the ABS. */
4249 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4250 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4251 <= HOST_BITS_PER_WIDE_INT)
4252 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4253 & ((HOST_WIDE_INT) 1
4254 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4255 == 0)))
4256 return XEXP (x, 0);
4257
4258
4259 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4260 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4261 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
4262
4263 break;
4264
4265 case FFS:
4266 /* (ffs (*_extend <X>)) = (ffs <X>) */
4267 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4268 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4269 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4270 break;
4271
4272 case FLOAT:
4273 /* (float (sign_extend <X>)) = (float <X>). */
4274 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4275 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4276 break;
4277
4278 case ASHIFT:
4279 case LSHIFTRT:
4280 case ASHIFTRT:
4281 case ROTATE:
4282 case ROTATERT:
4283 /* If this is a shift by a constant amount, simplify it. */
4284 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4285 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4286 INTVAL (XEXP (x, 1)));
4287
4288#ifdef SHIFT_COUNT_TRUNCATED
4289 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4290 SUBST (XEXP (x, 1),
4291 force_to_mode (XEXP (x, 1), GET_MODE (x),
4292 ((HOST_WIDE_INT) 1
4293 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4294 - 1,
4295 NULL_RTX, 0));
4296#endif
4297
4298 break;
4299
4300 default:
4301 break;
4302 }
4303
4304 return x;
4305}
4306\f
4307/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
4308
4309static rtx
4310simplify_if_then_else (x)
4311 rtx x;
4312{
4313 enum machine_mode mode = GET_MODE (x);
4314 rtx cond = XEXP (x, 0);
4315 rtx true = XEXP (x, 1);
4316 rtx false = XEXP (x, 2);
4317 enum rtx_code true_code = GET_CODE (cond);
4318 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4319 rtx temp;
4320 int i;
4321
4322 /* Simplify storing of the truth value. */
4323 if (comparison_p && true == const_true_rtx && false == const0_rtx)
4324 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4325
4326 /* Also when the truth value has to be reversed. */
4327 if (comparison_p && reversible_comparison_p (cond)
4328 && true == const0_rtx && false == const_true_rtx)
4329 return gen_binary (reverse_condition (true_code),
4330 mode, XEXP (cond, 0), XEXP (cond, 1));
4331
4332 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4333 in it is being compared against certain values. Get the true and false
4334 comparisons and see if that says anything about the value of each arm. */
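 /* E.g., in (if_then_else (eq R 0) (plus R 4) R), the true arm may
 assume R == 0 and so simplifies to 4, leaving
 (if_then_else (eq R 0) 4 R). */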
4335
4336 if (comparison_p && reversible_comparison_p (cond)
4337 && GET_CODE (XEXP (cond, 0)) == REG)
4338 {
4339 HOST_WIDE_INT nzb;
4340 rtx from = XEXP (cond, 0);
4341 enum rtx_code false_code = reverse_condition (true_code);
4342 rtx true_val = XEXP (cond, 1);
4343 rtx false_val = true_val;
4344 int swapped = 0;
4345
4346 /* If FALSE_CODE is EQ, swap the codes and arms. */
4347
4348 if (false_code == EQ)
4349 {
4350 swapped = 1, true_code = EQ, false_code = NE;
4351 temp = true, true = false, false = temp;
4352 }
4353
4354 /* If we are comparing against zero and the expression being tested has
4355 only a single bit that might be nonzero, that is its value when it is
4356 not equal to zero. Similarly if it is known to be -1 or 0. */
4357
4358 if (true_code == EQ && true_val == const0_rtx
4359 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4360 false_code = EQ, false_val = GEN_INT (nzb);
4361 else if (true_code == EQ && true_val == const0_rtx
4362 && (num_sign_bit_copies (from, GET_MODE (from))
4363 == GET_MODE_BITSIZE (GET_MODE (from))))
4364 false_code = EQ, false_val = constm1_rtx;
4365
4366 /* Now simplify an arm if we know the value of the register in the
4367 branch and it is used in the arm. Be careful due to the potential
4368 of locally-shared RTL. */
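 /* Example: if COND is (ne X (const_int 0)) and nonzero_bits says that
    only bit 3 of X can be set, then X must be 8 in the nonzero arm and
    0 in the other, so known_cond can fold X in each arm. */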
4369
4370 if (reg_mentioned_p (from, true))
4371 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
4372 pc_rtx, pc_rtx, 0, 0);
4373 if (reg_mentioned_p (from, false))
4374 false = subst (known_cond (copy_rtx (false), false_code,
4375 from, false_val),
4376 pc_rtx, pc_rtx, 0, 0);
4377
4378 SUBST (XEXP (x, 1), swapped ? false : true);
4379 SUBST (XEXP (x, 2), swapped ? true : false);
4380
4381 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4382 }
5109d49f 4383
8079805d
RK
4384 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4385 reversed, do so to avoid needing two sets of patterns for
4386 subtract-and-branch insns. Similarly if we have a constant in the true
4387 arm, the false arm is the same as the first operand of the comparison, or
4388 the false arm is more complicated than the true arm. */
4389
4390 if (comparison_p && reversible_comparison_p (cond)
4391 && (true == pc_rtx
4392 || (CONSTANT_P (true)
4393 && GET_CODE (false) != CONST_INT && false != pc_rtx)
4394 || true == const0_rtx
4395 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4396 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4397 || (GET_CODE (true) == SUBREG
4398 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4399 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4400 || reg_mentioned_p (true, false)
4401 || rtx_equal_p (false, XEXP (cond, 0))))
4402 {
4403 true_code = reverse_condition (true_code);
4404 SUBST (XEXP (x, 0),
4405 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4406 XEXP (cond, 1)));
5109d49f 4407
8079805d
RK
4408 SUBST (XEXP (x, 1), false);
4409 SUBST (XEXP (x, 2), true);
1a26b032 4410
8079805d 4411 temp = true, true = false, false = temp, cond = XEXP (x, 0);
bb821298 4412
0f41302f 4413 /* It is possible that the conditional has been simplified out. */
bb821298
RK
4414 true_code = GET_CODE (cond);
4415 comparison_p = GET_RTX_CLASS (true_code) == '<';
8079805d 4416 }
abe6e52f 4417
8079805d 4418 /* If the two arms are identical, we don't need the comparison. */
1a26b032 4419
8079805d
RK
4420 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4421 return true;
1a26b032 4422
5be669c7
RK
4423 /* Convert a == b ? b : a to "a". */
4424 if (true_code == EQ && ! side_effects_p (cond)
4425 && rtx_equal_p (XEXP (cond, 0), false)
4426 && rtx_equal_p (XEXP (cond, 1), true))
4427 return false;
4428 else if (true_code == NE && ! side_effects_p (cond)
4429 && rtx_equal_p (XEXP (cond, 0), true)
4430 && rtx_equal_p (XEXP (cond, 1), false))
4431 return true;
4432
8079805d
RK
4433 /* Look for cases where we have (abs x) or (neg (abs X)). */
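 /* Example: (if_then_else (gt X (const_int 0)) X (neg X)) matches the
    GT case below and becomes (abs X); with LT it becomes
    (neg (abs X)). */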
4434
4435 if (GET_MODE_CLASS (mode) == MODE_INT
4436 && GET_CODE (false) == NEG
4437 && rtx_equal_p (true, XEXP (false, 0))
4438 && comparison_p
4439 && rtx_equal_p (true, XEXP (cond, 0))
4440 && ! side_effects_p (true))
4441 switch (true_code)
4442 {
4443 case GT:
4444 case GE:
0c1c8ea6 4445 return gen_unary (ABS, mode, mode, true);
8079805d
RK
4446 case LT:
4447 case LE:
0c1c8ea6 4448 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
e9a25f70
JL
4449 default:
4450 break;
8079805d
RK
4451 }
4452
4453 /* Look for MIN or MAX. */
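 /* Example: (if_then_else (ge A B) A B) becomes (smax A B), and
    (if_then_else (ltu A B) A B) becomes (umin A B). */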
4454
34c8be72 4455 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
8079805d
RK
4456 && comparison_p
4457 && rtx_equal_p (XEXP (cond, 0), true)
4458 && rtx_equal_p (XEXP (cond, 1), false)
4459 && ! side_effects_p (cond))
4460 switch (true_code)
4461 {
4462 case GE:
4463 case GT:
4464 return gen_binary (SMAX, mode, true, false);
4465 case LE:
4466 case LT:
4467 return gen_binary (SMIN, mode, true, false);
4468 case GEU:
4469 case GTU:
4470 return gen_binary (UMAX, mode, true, false);
4471 case LEU:
4472 case LTU:
4473 return gen_binary (UMIN, mode, true, false);
e9a25f70
JL
4474 default:
4475 break;
8079805d
RK
4476 }
4477
8079805d
RK
4478 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4479 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4480 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4481 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4482 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
0802d516 4483 neither 1 nor -1, but it isn't worth checking for. */
8079805d 4484
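 /* Worked example, assuming STORE_FLAG_VALUE == 1:
    (if_then_else COND (plus Z (const_int 4)) Z) becomes
    (plus Z (mult COND (const_int 4))), since COND evaluates to 1 or 0
    and a PLUS whose second operand is zero is an identity. */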
0802d516
RK
4485 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4486 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
8079805d
RK
4487 {
4488 rtx t = make_compound_operation (true, SET);
4489 rtx f = make_compound_operation (false, SET);
4490 rtx cond_op0 = XEXP (cond, 0);
4491 rtx cond_op1 = XEXP (cond, 1);
6a651371 4492 enum rtx_code op = NIL, extend_op = NIL;
8079805d 4493 enum machine_mode m = mode;
6a651371 4494 rtx z = 0, c1 = NULL_RTX;
8079805d 4495
8079805d
RK
4496 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4497 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4498 || GET_CODE (t) == ASHIFT
4499 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4500 && rtx_equal_p (XEXP (t, 0), f))
4501 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4502
4503 /* If an identity-zero op is commutative, check whether there
0f41302f 4504 would be a match if we swapped the operands. */
8079805d
RK
4505 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4506 || GET_CODE (t) == XOR)
4507 && rtx_equal_p (XEXP (t, 1), f))
4508 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4509 else if (GET_CODE (t) == SIGN_EXTEND
4510 && (GET_CODE (XEXP (t, 0)) == PLUS
4511 || GET_CODE (XEXP (t, 0)) == MINUS
4512 || GET_CODE (XEXP (t, 0)) == IOR
4513 || GET_CODE (XEXP (t, 0)) == XOR
4514 || GET_CODE (XEXP (t, 0)) == ASHIFT
4515 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4516 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4517 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4518 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4519 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4520 && (num_sign_bit_copies (f, GET_MODE (f))
4521 > (GET_MODE_BITSIZE (mode)
4522 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4523 {
4524 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4525 extend_op = SIGN_EXTEND;
4526 m = GET_MODE (XEXP (t, 0));
1a26b032 4527 }
8079805d
RK
4528 else if (GET_CODE (t) == SIGN_EXTEND
4529 && (GET_CODE (XEXP (t, 0)) == PLUS
4530 || GET_CODE (XEXP (t, 0)) == IOR
4531 || GET_CODE (XEXP (t, 0)) == XOR)
4532 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4533 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4534 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4535 && (num_sign_bit_copies (f, GET_MODE (f))
4536 > (GET_MODE_BITSIZE (mode)
4537 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4538 {
4539 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4540 extend_op = SIGN_EXTEND;
4541 m = GET_MODE (XEXP (t, 0));
4542 }
4543 else if (GET_CODE (t) == ZERO_EXTEND
4544 && (GET_CODE (XEXP (t, 0)) == PLUS
4545 || GET_CODE (XEXP (t, 0)) == MINUS
4546 || GET_CODE (XEXP (t, 0)) == IOR
4547 || GET_CODE (XEXP (t, 0)) == XOR
4548 || GET_CODE (XEXP (t, 0)) == ASHIFT
4549 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4550 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4551 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4552 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4553 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4554 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4555 && ((nonzero_bits (f, GET_MODE (f))
4556 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4557 == 0))
4558 {
4559 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4560 extend_op = ZERO_EXTEND;
4561 m = GET_MODE (XEXP (t, 0));
4562 }
4563 else if (GET_CODE (t) == ZERO_EXTEND
4564 && (GET_CODE (XEXP (t, 0)) == PLUS
4565 || GET_CODE (XEXP (t, 0)) == IOR
4566 || GET_CODE (XEXP (t, 0)) == XOR)
4567 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4568 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4569 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4570 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4571 && ((nonzero_bits (f, GET_MODE (f))
4572 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4573 == 0))
4574 {
4575 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4576 extend_op = ZERO_EXTEND;
4577 m = GET_MODE (XEXP (t, 0));
4578 }
4579
4580 if (z)
4581 {
4582 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4583 pc_rtx, pc_rtx, 0, 0);
4584 temp = gen_binary (MULT, m, temp,
4585 gen_binary (MULT, m, c1, const_true_rtx));
4586 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4587 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4588
4589 if (extend_op != NIL)
0c1c8ea6 4590 temp = gen_unary (extend_op, mode, m, temp);
8079805d
RK
4591
4592 return temp;
4593 }
4594 }
224eeff2 4595
8079805d
RK
4596 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4597 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4598 negation of a single bit, we can convert this operation to a shift. We
4599 can actually do this more generally, but it doesn't seem worth it. */
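 /* Example: if A is known to be 0 or 1,
    (if_then_else (ne A (const_int 0)) (const_int 8) (const_int 0))
    becomes (ashift A (const_int 3)). */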
4600
4601 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4602 && false == const0_rtx && GET_CODE (true) == CONST_INT
4603 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4604 && (i = exact_log2 (INTVAL (true))) >= 0)
4605 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4606 == GET_MODE_BITSIZE (mode))
4607 && (i = exact_log2 (- INTVAL (true))) >= 0)))
4608 return
4609 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4610 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
230d793d 4611
8079805d
RK
4612 return x;
4613}
4614\f
4615/* Simplify X, a SET expression. Return the new expression. */
230d793d 4616
8079805d
RK
4617static rtx
4618simplify_set (x)
4619 rtx x;
4620{
4621 rtx src = SET_SRC (x);
4622 rtx dest = SET_DEST (x);
4623 enum machine_mode mode
4624 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4625 rtx other_insn;
4626 rtx *cc_use;
4627
4628 /* (set (pc) (return)) gets written as (return). */
4629 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4630 return src;
230d793d 4631
87e3e0c1
RK
4632 /* Now that we know for sure which bits of SRC we are using, see if we can
4633 simplify the expression for the object knowing that we only need the
4634 low-order bits. */
4635
4636 if (GET_MODE_CLASS (mode) == MODE_INT)
c5c76735
JL
4637 {
4638 src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4639 SUBST (SET_SRC (x), src);
4640 }
87e3e0c1 4641
8079805d
RK
4642 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4643 the comparison result and try to simplify it unless we already have used
4644 undobuf.other_insn. */
4645 if ((GET_CODE (src) == COMPARE
230d793d 4646#ifdef HAVE_cc0
8079805d 4647 || dest == cc0_rtx
230d793d 4648#endif
8079805d
RK
4649 )
4650 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4651 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4652 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
c0d3ac4d 4653 && rtx_equal_p (XEXP (*cc_use, 0), dest))
8079805d
RK
4654 {
4655 enum rtx_code old_code = GET_CODE (*cc_use);
4656 enum rtx_code new_code;
4657 rtx op0, op1;
4658 int other_changed = 0;
4659 enum machine_mode compare_mode = GET_MODE (dest);
4660
4661 if (GET_CODE (src) == COMPARE)
4662 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4663 else
4664 op0 = src, op1 = const0_rtx;
230d793d 4665
8079805d
RK
4666 /* Simplify our comparison, if possible. */
4667 new_code = simplify_comparison (old_code, &op0, &op1);
230d793d 4668
c141a106 4669#ifdef EXTRA_CC_MODES
8079805d
RK
4670 /* If this machine has CC modes other than CCmode, check to see if we
4671 need to use a different CC mode here. */
4672 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
c141a106 4673#endif /* EXTRA_CC_MODES */
230d793d 4674
c141a106 4675#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
8079805d
RK
4676 /* If the mode changed, we have to change SET_DEST, the mode in the
4677 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4678 a hard register, just build new versions with the proper mode. If it
4679 is a pseudo, we lose unless it is the only time we set the pseudo, in
4680 which case we can safely change its mode. */
4681 if (compare_mode != GET_MODE (dest))
4682 {
4683 int regno = REGNO (dest);
38a448ca 4684 rtx new_dest = gen_rtx_REG (compare_mode, regno);
8079805d
RK
4685
4686 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 4687 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
230d793d 4688 {
8079805d
RK
4689 if (regno >= FIRST_PSEUDO_REGISTER)
4690 SUBST (regno_reg_rtx[regno], new_dest);
230d793d 4691
8079805d
RK
4692 SUBST (SET_DEST (x), new_dest);
4693 SUBST (XEXP (*cc_use, 0), new_dest);
4694 other_changed = 1;
230d793d 4695
8079805d 4696 dest = new_dest;
230d793d 4697 }
8079805d 4698 }
230d793d
RS
4699#endif
4700
8079805d
RK
4701 /* If the code changed, we have to build a new comparison in
4702 undobuf.other_insn. */
4703 if (new_code != old_code)
4704 {
4705 unsigned HOST_WIDE_INT mask;
4706
4707 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4708 dest, const0_rtx));
4709
4710 /* If the only change we made was to change an EQ into an NE or
4711 vice versa, OP0 has only one bit that might be nonzero, and OP1
4712 is zero, check if changing the user of the condition code will
4713 produce a valid insn. If it won't, we can keep the original code
4714 in that insn by surrounding our operation with an XOR. */
4715
4716 if (((old_code == NE && new_code == EQ)
4717 || (old_code == EQ && new_code == NE))
4718 && ! other_changed && op1 == const0_rtx
4719 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4720 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
230d793d 4721 {
8079805d 4722 rtx pat = PATTERN (other_insn), note = 0;
230d793d 4723
8e2f6e35 4724 if ((recog_for_combine (&pat, other_insn, &note) < 0
8079805d
RK
4725 && ! check_asm_operands (pat)))
4726 {
4727 PUT_CODE (*cc_use, old_code);
4728 other_insn = 0;
230d793d 4729
8079805d 4730 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
230d793d 4731 }
230d793d
RS
4732 }
4733
8079805d
RK
4734 other_changed = 1;
4735 }
4736
4737 if (other_changed)
4738 undobuf.other_insn = other_insn;
230d793d
RS
4739
4740#ifdef HAVE_cc0
8079805d
RK
4741 /* If we are now comparing against zero, change our source if
4742 needed. If we do not use cc0, we always have a COMPARE. */
4743 if (op1 == const0_rtx && dest == cc0_rtx)
4744 {
4745 SUBST (SET_SRC (x), op0);
4746 src = op0;
4747 }
4748 else
230d793d
RS
4749#endif
4750
8079805d
RK
4751 /* Otherwise, if we didn't previously have a COMPARE in the
4752 correct mode, we need one. */
4753 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4754 {
4755 SUBST (SET_SRC (x),
4756 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4757 src = SET_SRC (x);
230d793d
RS
4758 }
4759 else
4760 {
8079805d
RK
4761 /* Otherwise, update the COMPARE if needed. */
4762 SUBST (XEXP (src, 0), op0);
4763 SUBST (XEXP (src, 1), op1);
230d793d 4764 }
8079805d
RK
4765 }
4766 else
4767 {
4768 /* Get SET_SRC in a form where we have placed back any
4769 compound expressions. Then do the checks below. */
4770 src = make_compound_operation (src, SET);
4771 SUBST (SET_SRC (x), src);
4772 }
230d793d 4773
8079805d
RK
4774 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4775 and X being a REG or (subreg (reg)), we may be able to convert this to
4776 (set (subreg:m2 x) (op)).
df62f951 4777
8079805d
RK
4778 We can always do this if M1 is narrower than M2 because that means that
4779 we only care about the low bits of the result.
df62f951 4780
8079805d 4781 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
9ec36da5 4782 perform a narrower operation than requested since the high-order bits will
8079805d
RK
4783 be undefined. On machines where it is defined, this transformation is safe
4784 as long as M1 and M2 have the same number of words. */
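 /* Illustrative example on a typical 32-bit target:
    (set X:QI (subreg:QI (plus:SI A B) 0)) can become
    (set (subreg:SI X 0) (plus:SI A B)), since only the low byte of
    the PLUS result is needed. */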
df62f951 4785
8079805d
RK
4786 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4787 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4788 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4789 / UNITS_PER_WORD)
4790 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4791 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
8baf60bb 4792#ifndef WORD_REGISTER_OPERATIONS
8079805d
RK
4793 && (GET_MODE_SIZE (GET_MODE (src))
4794 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
df62f951 4795#endif
f507a070
RK
4796#ifdef CLASS_CANNOT_CHANGE_SIZE
4797 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
4798 && (TEST_HARD_REG_BIT
4799 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
4800 REGNO (dest)))
4801 && (GET_MODE_SIZE (GET_MODE (src))
4802 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4803#endif
8079805d
RK
4804 && (GET_CODE (dest) == REG
4805 || (GET_CODE (dest) == SUBREG
4806 && GET_CODE (SUBREG_REG (dest)) == REG)))
4807 {
4808 SUBST (SET_DEST (x),
4809 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4810 dest));
4811 SUBST (SET_SRC (x), SUBREG_REG (src));
4812
4813 src = SET_SRC (x), dest = SET_DEST (x);
4814 }
df62f951 4815
8baf60bb 4816#ifdef LOAD_EXTEND_OP
8079805d
RK
4817 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4818 would require a paradoxical subreg. Replace the subreg with a
0f41302f 4819 zero_extend to avoid the reload that would otherwise be required. */
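 /* Illustrative example: on a machine whose byte loads zero-extend,
    i.e. LOAD_EXTEND_OP (QImode) == ZERO_EXTEND, the paradoxical
    (subreg:SI (mem:QI ADDR) 0) is replaced by
    (zero_extend:SI (mem:QI ADDR)). */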
8079805d
RK
4820
4821 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4822 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4823 && SUBREG_WORD (src) == 0
4824 && (GET_MODE_SIZE (GET_MODE (src))
4825 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4826 && GET_CODE (SUBREG_REG (src)) == MEM)
4827 {
4828 SUBST (SET_SRC (x),
4829 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4830 GET_MODE (src), XEXP (src, 0)));
4831
4832 src = SET_SRC (x);
4833 }
230d793d
RS
4834#endif
4835
8079805d
RK
4836 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4837 are comparing an item known to be 0 or -1 against 0, use a logical
4838 operation instead. Check for one of the arms being an IOR of the other
4839 arm with some value. We compute three terms to be IOR'ed together. In
4840 practice, at most two will be nonzero. Then we do the IOR's. */
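 /* Example: if A is known to be 0 or -1, (if_then_else (ne A 0) B C)
    becomes (ior (and A B) (and (not A) C)); in this simple case the
    remaining IOR term is const0_rtx. */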
4841
4842 if (GET_CODE (dest) != PC
4843 && GET_CODE (src) == IF_THEN_ELSE
36b8d792 4844 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
8079805d
RK
4845 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4846 && XEXP (XEXP (src, 0), 1) == const0_rtx
6dd49058 4847 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
ea414472
DE
4848#ifdef HAVE_conditional_move
4849 && ! can_conditionally_move_p (GET_MODE (src))
4850#endif
8079805d
RK
4851 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4852 GET_MODE (XEXP (XEXP (src, 0), 0)))
4853 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4854 && ! side_effects_p (src))
4855 {
4856 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4857 ? XEXP (src, 1) : XEXP (src, 2));
4858 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4859 ? XEXP (src, 2) : XEXP (src, 1));
4860 rtx term1 = const0_rtx, term2, term3;
4861
4862 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4863 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4864 else if (GET_CODE (true) == IOR
4865 && rtx_equal_p (XEXP (true, 1), false))
4866 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4867 else if (GET_CODE (false) == IOR
4868 && rtx_equal_p (XEXP (false, 0), true))
4869 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4870 else if (GET_CODE (false) == IOR
4871 && rtx_equal_p (XEXP (false, 1), true))
4872 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4873
4874 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4875 term3 = gen_binary (AND, GET_MODE (src),
0c1c8ea6 4876 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
8079805d
RK
4877 XEXP (XEXP (src, 0), 0)),
4878 false);
4879
4880 SUBST (SET_SRC (x),
4881 gen_binary (IOR, GET_MODE (src),
4882 gen_binary (IOR, GET_MODE (src), term1, term2),
4883 term3));
4884
4885 src = SET_SRC (x);
4886 }
230d793d 4887
c5c76735
JL
4888#ifdef HAVE_conditional_arithmetic
4889 /* If we have conditional arithmetic and the operand of a SET is
4890 a conditional expression, replace this with an IF_THEN_ELSE.
4891 We can either have a conditional expression or a MULT of that expression
4892 with a constant. */
4893 if ((GET_RTX_CLASS (GET_CODE (src)) == '1'
4894 || GET_RTX_CLASS (GET_CODE (src)) == '2'
4895 || GET_RTX_CLASS (GET_CODE (src)) == 'c')
4896 && (GET_RTX_CLASS (GET_CODE (XEXP (src, 0))) == '<'
4897 || (GET_CODE (XEXP (src, 0)) == MULT
4898 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (src, 0), 0))) == '<'
4899 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT)))
4900 {
4901 rtx cond = XEXP (src, 0);
4902 rtx true_val = const1_rtx;
4903 rtx false_arm, true_arm;
4904
4905 if (GET_CODE (cond) == MULT)
4906 {
4907 true_val = XEXP (cond, 1);
4908 cond = XEXP (cond, 0);
4909 }
4910
4911 if (GET_RTX_CLASS (GET_CODE (src)) == '1')
4912 {
4913 true_arm = gen_unary (GET_CODE (src), GET_MODE (src),
4914 GET_MODE (XEXP (src, 0)), true_val);
4915 false_arm = gen_unary (GET_CODE (src), GET_MODE (src),
4916 GET_MODE (XEXP (src, 0)), const0_rtx);
4917 }
4918 else
4919 {
4920 true_arm = gen_binary (GET_CODE (src), GET_MODE (src),
4921 true_val, XEXP (src, 1));
4922 false_arm = gen_binary (GET_CODE (src), GET_MODE (src),
4923 const0_rtx, XEXP (src, 1));
4924 }
4925
4926 /* Canonicalize if true_arm is the simpler one. */
4927 if (GET_RTX_CLASS (GET_CODE (true_arm)) == 'o'
4928 && GET_RTX_CLASS (GET_CODE (false_arm)) != 'o'
4929 && reversible_comparison_p (cond))
4930 {
4931 rtx temp = true_arm;
4932
4933 true_arm = false_arm;
4934 false_arm = temp;
4935
4936 cond = gen_rtx_combine (reverse_condition (GET_CODE (cond)),
4937 GET_MODE (cond), XEXP (cond, 0),
4938 XEXP (cond, 1));
4939 }
4940
4941 src = gen_rtx_combine (IF_THEN_ELSE, GET_MODE (src),
4942 gen_rtx_combine (GET_CODE (cond), VOIDmode,
4943 XEXP (cond, 0),
4944 XEXP (cond, 1)),
4945 true_arm, false_arm);
4946 SUBST (SET_SRC (x), src);
4947 }
4948#endif
4949
246e00f2
RK
4950 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4951 whole thing fail. */
4952 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4953 return src;
4954 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4955 return dest;
4956 else
4957 /* Convert this into a field assignment operation, if possible. */
4958 return make_field_assignment (x);
8079805d
RK
4959}
4960\f
4961/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
4962 result. LAST is nonzero if this is the last retry. */
4963
4964static rtx
4965simplify_logical (x, last)
4966 rtx x;
4967 int last;
4968{
4969 enum machine_mode mode = GET_MODE (x);
4970 rtx op0 = XEXP (x, 0);
4971 rtx op1 = XEXP (x, 1);
4972
4973 switch (GET_CODE (x))
4974 {
230d793d 4975 case AND:
8079805d
RK
4976 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4977 insn (and may simplify more). */
4978 if (GET_CODE (op0) == XOR
4979 && rtx_equal_p (XEXP (op0, 0), op1)
4980 && ! side_effects_p (op1))
0c1c8ea6
RK
4981 x = gen_binary (AND, mode,
4982 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
8079805d
RK
4983
4984 if (GET_CODE (op0) == XOR
4985 && rtx_equal_p (XEXP (op0, 1), op1)
4986 && ! side_effects_p (op1))
0c1c8ea6
RK
4987 x = gen_binary (AND, mode,
4988 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
8079805d
RK
4989
4990 /* Similarly for (~ (A ^ B)) & A. */
4991 if (GET_CODE (op0) == NOT
4992 && GET_CODE (XEXP (op0, 0)) == XOR
4993 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4994 && ! side_effects_p (op1))
4995 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4996
4997 if (GET_CODE (op0) == NOT
4998 && GET_CODE (XEXP (op0, 0)) == XOR
4999 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5000 && ! side_effects_p (op1))
5001 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5002
2e8f9abf
DM
5003 /* We can call simplify_and_const_int only if we don't lose
5004 any (sign) bits when converting INTVAL (op1) to
5005 "unsigned HOST_WIDE_INT". */
5006 if (GET_CODE (op1) == CONST_INT
5007 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5008 || INTVAL (op1) > 0))
230d793d 5009 {
8079805d 5010 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
230d793d
RS
5011
5012 /* If we have (ior (and X C1) C2) and the next restart would be
5013 the last, simplify this by making C1 as small as possible
0f41302f 5014 and then exit. */
8079805d
RK
5015 if (last
5016 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5017 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5018 && GET_CODE (op1) == CONST_INT)
5019 return gen_binary (IOR, mode,
5020 gen_binary (AND, mode, XEXP (op0, 0),
5021 GEN_INT (INTVAL (XEXP (op0, 1))
5022 & ~ INTVAL (op1))), op1);
230d793d
RS
5023
5024 if (GET_CODE (x) != AND)
8079805d 5025 return x;
0e32506c
RK
5026
5027 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
5028 || GET_RTX_CLASS (GET_CODE (x)) == '2')
5029 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
230d793d
RS
5030 }
5031
5032 /* Convert (A | B) & A to A. */
8079805d
RK
5033 if (GET_CODE (op0) == IOR
5034 && (rtx_equal_p (XEXP (op0, 0), op1)
5035 || rtx_equal_p (XEXP (op0, 1), op1))
5036 && ! side_effects_p (XEXP (op0, 0))
5037 && ! side_effects_p (XEXP (op0, 1)))
5038 return op1;
230d793d 5039
d0ab8cd3 5040 /* In the following group of tests (and those in case IOR below),
230d793d
RS
5041 we start with some combination of logical operations and apply
5042 the distributive law followed by the inverse distributive law.
5043 Most of the time, this results in no change. However, if some of
5044 the operands are the same or inverses of each other, simplifications
5045 will result.
5046
5047 For example, (and (ior A B) (not B)) can occur as the result of
5048 expanding a bit field assignment. When we apply the distributive
5049 law to this, we get (ior (and A (not B)) (and B (not B))),
8079805d 5050 which then simplifies to (and A (not B)).
230d793d 5051
8079805d 5052 If we have (and (ior A B) C), apply the distributive law and then
230d793d
RS
5053 the inverse distributive law to see if things simplify. */
5054
8079805d 5055 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
230d793d
RS
5056 {
5057 x = apply_distributive_law
8079805d
RK
5058 (gen_binary (GET_CODE (op0), mode,
5059 gen_binary (AND, mode, XEXP (op0, 0), op1),
5060 gen_binary (AND, mode, XEXP (op0, 1), op1)));
230d793d 5061 if (GET_CODE (x) != AND)
8079805d 5062 return x;
230d793d
RS
5063 }
5064
8079805d
RK
5065 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5066 return apply_distributive_law
5067 (gen_binary (GET_CODE (op1), mode,
5068 gen_binary (AND, mode, XEXP (op1, 0), op0),
5069 gen_binary (AND, mode, XEXP (op1, 1), op0)));
230d793d
RS
5070
5071 /* Similarly, taking advantage of the fact that
5072 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
5073
8079805d
RK
5074 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5075 return apply_distributive_law
5076 (gen_binary (XOR, mode,
5077 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5078 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
230d793d 5079
8079805d
RK
5080 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5081 return apply_distributive_law
5082 (gen_binary (XOR, mode,
5083 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
5084 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
230d793d
RS
5085 break;
5086
5087 case IOR:
951553af 5088 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
8079805d 5089 if (GET_CODE (op1) == CONST_INT
ac49a949 5090 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8079805d
RK
5091 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
5092 return op1;
d0ab8cd3 5093
230d793d 5094 /* Convert (A & B) | A to A. */
8079805d
RK
5095 if (GET_CODE (op0) == AND
5096 && (rtx_equal_p (XEXP (op0, 0), op1)
5097 || rtx_equal_p (XEXP (op0, 1), op1))
5098 && ! side_effects_p (XEXP (op0, 0))
5099 && ! side_effects_p (XEXP (op0, 1)))
5100 return op1;
230d793d
RS
5101
5102 /* If we have (ior (and A B) C), apply the distributive law and then
5103 the inverse distributive law to see if things simplify. */
5104
8079805d 5105 if (GET_CODE (op0) == AND)
230d793d
RS
5106 {
5107 x = apply_distributive_law
5108 (gen_binary (AND, mode,
8079805d
RK
5109 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5110 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
230d793d
RS
5111
5112 if (GET_CODE (x) != IOR)
8079805d 5113 return x;
230d793d
RS
5114 }
5115
8079805d 5116 if (GET_CODE (op1) == AND)
230d793d
RS
5117 {
5118 x = apply_distributive_law
5119 (gen_binary (AND, mode,
8079805d
RK
5120 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5121 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
230d793d
RS
5122
5123 if (GET_CODE (x) != IOR)
8079805d 5124 return x;
230d793d
RS
5125 }
5126
5127 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5128 mode size to (rotate A CX). */
5129
8079805d
RK
5130 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5131 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5132 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5133 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5134 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5135 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
230d793d 5136 == GET_MODE_BITSIZE (mode)))
38a448ca
RH
5137 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5138 (GET_CODE (op0) == ASHIFT
5139 ? XEXP (op0, 1) : XEXP (op1, 1)));
230d793d 5140
71923da7
RK
5141 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5142 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5143 does not affect any of the bits in OP1, it can really be done
5144 as a PLUS and we can associate. We do this by seeing if OP1
5145 can be safely shifted left C bits. */
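 /* Example: (ior (ashiftrt (plus X (const_int C)) (const_int 16))
    (const_int 5)) can become
    (ashiftrt (plus X (const_int C | 0x50000)) (const_int 16)),
    provided (5 << 16) has no bits in common with the nonzero bits of
    the PLUS. */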
5146 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5147 && GET_CODE (XEXP (op0, 0)) == PLUS
5148 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5149 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5150 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5151 {
5152 int count = INTVAL (XEXP (op0, 1));
5153 HOST_WIDE_INT mask = INTVAL (op1) << count;
5154
5155 if (mask >> count == INTVAL (op1)
5156 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5157 {
5158 SUBST (XEXP (XEXP (op0, 0), 1),
5159 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5160 return op0;
5161 }
5162 }
230d793d
RS
5163 break;
5164
5165 case XOR:
79e8185c
JH
5166 /* If we are XORing two things that have no bits in common,
5167 convert them into an IOR. This helps to detect rotations encoded
5168 this way and may enable other simplifications. */
5169
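 /* Example: if nonzero_bits shows that op0 can have only bits 0-3 set
    and op1 only bits 4-7, then (xor op0 op1) and (ior op0 op1) compute
    the same value. */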
5170 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5171 && (nonzero_bits (op0, mode)
5172 & nonzero_bits (op1, mode)) == 0)
5173 return (gen_binary (IOR, mode, op0, op1));
5174
230d793d
RS
5175 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5176 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5177 (NOT y). */
5178 {
5179 int num_negated = 0;
230d793d 5180
8079805d
RK
5181 if (GET_CODE (op0) == NOT)
5182 num_negated++, op0 = XEXP (op0, 0);
5183 if (GET_CODE (op1) == NOT)
5184 num_negated++, op1 = XEXP (op1, 0);
230d793d
RS
5185
5186 if (num_negated == 2)
5187 {
8079805d
RK
5188 SUBST (XEXP (x, 0), op0);
5189 SUBST (XEXP (x, 1), op1);
230d793d
RS
5190 }
5191 else if (num_negated == 1)
0c1c8ea6 5192 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
230d793d
RS
5193 }
5194
5195 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5196 correspond to a machine insn or result in further simplifications
5197 if B is a constant. */
5198
8079805d
RK
5199 if (GET_CODE (op0) == AND
5200 && rtx_equal_p (XEXP (op0, 1), op1)
5201 && ! side_effects_p (op1))
0c1c8ea6
RK
5202 return gen_binary (AND, mode,
5203 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
8079805d 5204 op1);
230d793d 5205
8079805d
RK
5206 else if (GET_CODE (op0) == AND
5207 && rtx_equal_p (XEXP (op0, 0), op1)
5208 && ! side_effects_p (op1))
0c1c8ea6
RK
5209 return gen_binary (AND, mode,
5210 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
8079805d 5211 op1);
230d793d 5212
230d793d 5213 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
0802d516
RK
5214 comparison if STORE_FLAG_VALUE is 1. */
5215 if (STORE_FLAG_VALUE == 1
5216 && op1 == const1_rtx
8079805d
RK
5217 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5218 && reversible_comparison_p (op0))
5219 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5220 mode, XEXP (op0, 0), XEXP (op0, 1));
500c518b
RK
5221
5222 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5223 is (lt foo (const_int 0)), so we can perform the above
0802d516 5224 simplification if STORE_FLAG_VALUE is 1. */
500c518b 5225
0802d516
RK
5226 if (STORE_FLAG_VALUE == 1
5227 && op1 == const1_rtx
8079805d
RK
5228 && GET_CODE (op0) == LSHIFTRT
5229 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5230 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5231 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
230d793d
RS
5232
5233 /* (xor (comparison foo bar) (const_int sign-bit))
5234 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22 5235 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 5236 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
e51712db 5237 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
8079805d
RK
5238 && op1 == const_true_rtx
5239 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5240 && reversible_comparison_p (op0))
5241 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5242 mode, XEXP (op0, 0), XEXP (op0, 1));
0918eca0 5243
230d793d 5244 break;
e9a25f70
JL
5245
5246 default:
5247 abort ();
230d793d
RS
5248 }
5249
5250 return x;
5251}
5252\f
5253/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5254 operations" because they can be replaced with two more basic operations.
5255 ZERO_EXTEND is also considered "compound" because it can be replaced with
5256 an AND operation, which is simpler, though only one operation.
5257
5258 The function expand_compound_operation is called with an rtx expression
5259 and will convert it to the appropriate shifts and AND operations,
5260 simplifying at each stage.
5261
5262 The function make_compound_operation is called to convert an expression
5263 consisting of shifts and ANDs into the equivalent compound expression.
5264 It is the inverse of this function, loosely speaking. */
5265
5266static rtx
5267expand_compound_operation (x)
5268 rtx x;
5269{
5270 int pos = 0, len;
5271 int unsignedp = 0;
5272 int modewidth;
5273 rtx tem;
5274
5275 switch (GET_CODE (x))
5276 {
5277 case ZERO_EXTEND:
5278 unsignedp = 1;
5279 case SIGN_EXTEND:
75473182
RS
5280 /* We can't necessarily use a const_int for a multiword mode;
5281 it depends on implicitly extending the value.
5282 Since we don't know the right way to extend it,
5283 we can't tell whether the implicit way is right.
5284
5285 Even for a mode that is no wider than a const_int,
5286 we can't win, because we need to sign extend one of its bits through
5287 the rest of it, and we don't know which bit. */
230d793d 5288 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 5289 return x;
230d793d 5290
8079805d
RK
5291 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5292 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5293 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5294 reloaded. If not for that, MEM's would very rarely be safe.
5295
5296 Reject MODEs bigger than a word, because we might not be able
5297 to reference a two-register group starting with an arbitrary register
5298 (and currently gen_lowpart might crash for a SUBREG). */
5299
5300 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
5301 return x;
5302
5303 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5304 /* If the inner object has VOIDmode (the only way this can happen
5305 is if it is an ASM_OPERANDS), we can't do anything since we don't
5306 know how much masking to do. */
5307 if (len == 0)
5308 return x;
5309
5310 break;
5311
5312 case ZERO_EXTRACT:
5313 unsignedp = 1;
5314 case SIGN_EXTRACT:
5315 /* If the operand is a CLOBBER, just return it. */
5316 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5317 return XEXP (x, 0);
5318
5319 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5320 || GET_CODE (XEXP (x, 2)) != CONST_INT
5321 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5322 return x;
5323
5324 len = INTVAL (XEXP (x, 1));
5325 pos = INTVAL (XEXP (x, 2));
5326
5327 /* If this goes outside the object being extracted, replace the object
5328 with a (use (mem ...)) construct that only combine understands
5329 and is used only for this purpose. */
5330 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
38a448ca 5331 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
230d793d 5332
f76b9db2
ILT
5333 if (BITS_BIG_ENDIAN)
5334 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5335
230d793d
RS
5336 break;
5337
5338 default:
5339 return x;
5340 }
5341
0f13a422
ILT
5342 /* We can optimize some special cases of ZERO_EXTEND. */
5343 if (GET_CODE (x) == ZERO_EXTEND)
5344 {
5345 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5346 know that the last value didn't have any inappropriate bits
5347 set. */
5348 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5349 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5350 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5351 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5352 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5353 return XEXP (XEXP (x, 0), 0);
5354
5355 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5356 if (GET_CODE (XEXP (x, 0)) == SUBREG
5357 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5358 && subreg_lowpart_p (XEXP (x, 0))
5359 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5360 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
fcc60894 5361 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5362 return SUBREG_REG (XEXP (x, 0));
5363
5364 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5365 is a comparison and STORE_FLAG_VALUE permits. This is like
5366 the first case, but it works even when GET_MODE (x) is larger
5367 than HOST_WIDE_INT. */
5368 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5369 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5370 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5371 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5372 <= HOST_BITS_PER_WIDE_INT)
5373 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5374 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5375 return XEXP (XEXP (x, 0), 0);
5376
5377 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5378 if (GET_CODE (XEXP (x, 0)) == SUBREG
5379 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5380 && subreg_lowpart_p (XEXP (x, 0))
5381 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5382 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5383 <= HOST_BITS_PER_WIDE_INT)
5384 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5385 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5386 return SUBREG_REG (XEXP (x, 0));
5387
5388 /* If sign extension is cheaper than zero extension, then use it
5389 if we know that no extraneous bits are set, and that the high
5390 bit is not set. */
5391 if (flag_expensive_optimizations
5392 && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5393 && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
5394 & ~ (((unsigned HOST_WIDE_INT)
5395 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5396 >> 1))
5397 == 0))
5398 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5399 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5400 <= HOST_BITS_PER_WIDE_INT)
5401 && (((HOST_WIDE_INT) STORE_FLAG_VALUE
5402 & ~ (((unsigned HOST_WIDE_INT)
5403 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5404 >> 1))
5405 == 0))))
5406 {
38a448ca 5407 rtx temp = gen_rtx_SIGN_EXTEND (GET_MODE (x), XEXP (x, 0));
0f13a422
ILT
5408
5409 if (rtx_cost (temp, SET) < rtx_cost (x, SET))
5410 return expand_compound_operation (temp);
5411 }
5412 }
5413
230d793d
RS
5414 /* If we reach here, we want to return a pair of shifts. The inner
5415 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5416 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5417 logical depending on the value of UNSIGNEDP.
5418
5419 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5420 converted into an AND of a shift.
5421
5422 We must check for the case where the left shift would have a negative
5423 count. This can happen in a case like (x >> 31) & 255 on machines
5424 that can't shift by a constant. On those machines, we would first
5425 combine the shift with the AND to produce a variable-position
5426 extraction. Then the constant of 31 would be substituted in to produce
5427 such a position. */
5428
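 /* Example: in a 32-bit mode, (sign_extract:SI X (const_int 8)
    (const_int 0)) becomes (ashiftrt:SI (ashift:SI X (const_int 24))
    (const_int 24)): shift left 32 - 0 - 8 = 24 bits, then
    arithmetically right 32 - 8 = 24 bits. */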
5429 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5430 if (modewidth >= pos + len)
5f4f0e22 5431 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 5432 GET_MODE (x),
5f4f0e22
CH
5433 simplify_shift_const (NULL_RTX, ASHIFT,
5434 GET_MODE (x),
230d793d
RS
5435 XEXP (x, 0),
5436 modewidth - pos - len),
5437 modewidth - len);
5438
5f4f0e22
CH
5439 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5440 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5441 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
5442 GET_MODE (x),
5443 XEXP (x, 0), pos),
5f4f0e22 5444 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5445 else
5446 /* Any other cases we can't handle. */
5447 return x;
5448
5449
5450 /* If we couldn't do this for some reason, return the original
5451 expression. */
5452 if (GET_CODE (tem) == CLOBBER)
5453 return x;
5454
5455 return tem;
5456}
5457\f
5458/* X is a SET which contains an assignment of one object into
5459 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5460 or certain SUBREGS). If possible, convert it into a series of
5461 logical operations.
5462
5463 We half-heartedly support variable positions, but do not at all
5464 support variable lengths. */
5465
5466static rtx
5467expand_field_assignment (x)
5468 rtx x;
5469{
5470 rtx inner;
0f41302f 5471 rtx pos; /* Always counts from low bit. */
230d793d
RS
5472 int len;
5473 rtx mask;
5474 enum machine_mode compute_mode;
5475
5476 /* Loop until we find something we can't simplify. */
5477 while (1)
5478 {
5479 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5480 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5481 {
5482 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5483 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4d9cfc7b 5484 pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
230d793d
RS
5485 }
5486 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5487 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5488 {
5489 inner = XEXP (SET_DEST (x), 0);
5490 len = INTVAL (XEXP (SET_DEST (x), 1));
5491 pos = XEXP (SET_DEST (x), 2);
5492
5493 /* If the position is constant and spans the width of INNER,
5494 surround INNER with a USE to indicate this. */
5495 if (GET_CODE (pos) == CONST_INT
5496 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
38a448ca 5497 inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
230d793d 5498
f76b9db2
ILT
5499 if (BITS_BIG_ENDIAN)
5500 {
5501 if (GET_CODE (pos) == CONST_INT)
5502 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5503 - INTVAL (pos));
5504 else if (GET_CODE (pos) == MINUS
5505 && GET_CODE (XEXP (pos, 1)) == CONST_INT
5506 && (INTVAL (XEXP (pos, 1))
5507 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5508 /* If position is ADJUST - X, new position is X. */
5509 pos = XEXP (pos, 0);
5510 else
5511 pos = gen_binary (MINUS, GET_MODE (pos),
5512 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5513 - len),
5514 pos);
5515 }
230d793d
RS
5516 }
5517
5518 /* A SUBREG between two modes that occupy the same numbers of words
5519 can be done by moving the SUBREG to the source. */
5520 else if (GET_CODE (SET_DEST (x)) == SUBREG
5521 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5522 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5523 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5524 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5525 {
38a448ca 5526 x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
c5c76735
JL
5527 gen_lowpart_for_combine
5528 (GET_MODE (SUBREG_REG (SET_DEST (x))),
5529 SET_SRC (x)));
230d793d
RS
5530 continue;
5531 }
5532 else
5533 break;
5534
5535 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5536 inner = SUBREG_REG (inner);
5537
5538 compute_mode = GET_MODE (inner);
5539
861556b4
RH
5540 /* Don't attempt bitwise arithmetic on non-integral modes. */
5541 if (! INTEGRAL_MODE_P (compute_mode))
5542 {
5543 enum machine_mode imode;
5544
5545 /* Something is probably seriously wrong if this matches. */
5546 if (! FLOAT_MODE_P (compute_mode))
5547 break;
5548
5549 /* Try to find an integral mode to pun with. */
5550 imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5551 if (imode == BLKmode)
5552 break;
5553
5554 compute_mode = imode;
5555 inner = gen_lowpart_for_combine (imode, inner);
5556 }
5557
230d793d 5558 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5f4f0e22
CH
5559 if (len < HOST_BITS_PER_WIDE_INT)
5560 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5561 else
5562 break;
5563
5564 /* Now compute the equivalent expression. Make a copy of INNER
5565 for the SET_DEST in case it is a MEM into which we will substitute;
5566 we don't want shared RTL in that case. */
c5c76735
JL
5567 x = gen_rtx_SET
5568 (VOIDmode, copy_rtx (inner),
5569 gen_binary (IOR, compute_mode,
5570 gen_binary (AND, compute_mode,
5571 gen_unary (NOT, compute_mode,
5572 compute_mode,
5573 gen_binary (ASHIFT,
5574 compute_mode,
5575 mask, pos)),
5576 inner),
5577 gen_binary (ASHIFT, compute_mode,
5578 gen_binary (AND, compute_mode,
5579 gen_lowpart_for_combine
5580 (compute_mode, SET_SRC (x)),
5581 mask),
5582 pos)));
230d793d
RS
5583 }
5584
5585 return x;
5586}
5587\f
8999a12e
RK
5588/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5589 it is an RTX that represents a variable starting position; otherwise,
5590 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
5591
5592 INNER may be a USE. This will occur when we started with a bitfield
5593 that went outside the boundary of the object in memory, which is
5594 allowed on most machines. To isolate this case, we produce a USE
5595 whose mode is wide enough and surround the MEM with it. The only
5596 code that understands the USE is this routine. If it is not removed,
5597 it will cause the resulting insn not to match.
5598
5599 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5600 signed reference.
5601
5602 IN_DEST is non-zero if this is a reference in the destination of a
5603 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5604 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5605 be used.
5606
5607 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5608 ZERO_EXTRACT should be built even for bits starting at bit 0.
5609
76184def
DE
5610 MODE is the desired mode of the result (if IN_DEST == 0).
5611
5612 The result is an RTX for the extraction or NULL_RTX if the target
5613 can't handle it. */
230d793d
RS
5614
5615static rtx
5616make_extraction (mode, inner, pos, pos_rtx, len,
5617 unsignedp, in_dest, in_compare)
5618 enum machine_mode mode;
5619 rtx inner;
5620 int pos;
5621 rtx pos_rtx;
5622 int len;
5623 int unsignedp;
5624 int in_dest, in_compare;
5625{
94b4b17a
RS
5626 /* This mode describes the size of the storage area
5627 to fetch the overall value from. Within that, we
5628 ignore the POS lowest bits, etc. */
230d793d
RS
5629 enum machine_mode is_mode = GET_MODE (inner);
5630 enum machine_mode inner_mode;
d7cd794f
RK
5631 enum machine_mode wanted_inner_mode = byte_mode;
5632 enum machine_mode wanted_inner_reg_mode = word_mode;
230d793d
RS
5633 enum machine_mode pos_mode = word_mode;
5634 enum machine_mode extraction_mode = word_mode;
5635 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5636 int spans_byte = 0;
5637 rtx new = 0;
8999a12e 5638 rtx orig_pos_rtx = pos_rtx;
6139ff20 5639 int orig_pos;
230d793d
RS
5640
5641 /* Get some information about INNER and get the innermost object. */
5642 if (GET_CODE (inner) == USE)
94b4b17a 5643 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
230d793d
RS
5644 /* We don't need to adjust the position because we set up the USE
5645 to pretend that it was a full-word object. */
5646 spans_byte = 1, inner = XEXP (inner, 0);
5647 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
94b4b17a
RS
5648 {
5649 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5650 consider just the QI as the memory to extract from.
5651 The subreg adds or removes high bits; its mode is
5652 irrelevant to the meaning of this extraction,
5653 since POS and LEN count from the lsb. */
5654 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5655 is_mode = GET_MODE (SUBREG_REG (inner));
5656 inner = SUBREG_REG (inner);
5657 }
230d793d
RS
5658
5659 inner_mode = GET_MODE (inner);
5660
5661 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
8999a12e 5662 pos = INTVAL (pos_rtx), pos_rtx = 0;
230d793d
RS
5663
5664 /* See if this can be done without an extraction. We never can if the
5665 width of the field is not the same as that of some integer mode. For
5666 registers, we can only avoid the extraction if the position is at the
5667 low-order bit and this is either not in the destination or we have the
5668 appropriate STRICT_LOW_PART operation available.
5669
5670 For MEM, we can avoid an extract if the field starts on an appropriate
5671 boundary and we can change the mode of the memory reference. However,
5672 we cannot directly access the MEM if we have a USE and the underlying
5673 MEM is not TMODE. This combination means that MEM was being used in a
5674 context where bits outside its mode were being referenced; that is only
5675 valid in bit-field insns. */
5676
5677 if (tmode != BLKmode
5678 && ! (spans_byte && inner_mode != tmode)
4d9cfc7b
RK
5679 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5680 && GET_CODE (inner) != MEM
230d793d 5681 && (! in_dest
df62f951
RK
5682 || (GET_CODE (inner) == REG
5683 && (movstrict_optab->handlers[(int) tmode].insn_code
5684 != CODE_FOR_nothing))))
8999a12e 5685 || (GET_CODE (inner) == MEM && pos_rtx == 0
dfbe1b2f
RK
5686 && (pos
5687 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5688 : BITS_PER_UNIT)) == 0
230d793d
RS
5689 /* We can't do this if we are widening INNER_MODE (it
5690 may not be aligned, for one thing). */
5691 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5692 && (inner_mode == tmode
5693 || (! mode_dependent_address_p (XEXP (inner, 0))
5694 && ! MEM_VOLATILE_P (inner))))))
5695 {
230d793d
RS
5696 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5697 field. If the original and current mode are the same, we need not
5698 adjust the offset. Otherwise, we do if bytes big endian.
5699
4d9cfc7b
RK
5700 If INNER is not a MEM, get a piece consisting of just the field
5701 of interest (in this case POS % BITS_PER_WORD must be 0). */
230d793d
RS
5702
5703 if (GET_CODE (inner) == MEM)
5704 {
94b4b17a
RS
5705 int offset;
5706 /* POS counts from lsb, but make OFFSET count in memory order. */
5707 if (BYTES_BIG_ENDIAN)
5708 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5709 else
5710 offset = pos / BITS_PER_UNIT;
230d793d 5711
38a448ca 5712 new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
230d793d 5713 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
c6df88cb 5714 MEM_COPY_ATTRIBUTES (new, inner);
230d793d 5715 }
df62f951 5716 else if (GET_CODE (inner) == REG)
c0d3ac4d
RK
5717 {
5718 /* We can't call gen_lowpart_for_combine here since we always want
5719 a SUBREG and it would sometimes return a new hard register. */
5720 if (tmode != inner_mode)
38a448ca
RH
5721 new = gen_rtx_SUBREG (tmode, inner,
5722 (WORDS_BIG_ENDIAN
c5c76735
JL
5723 && (GET_MODE_SIZE (inner_mode)
5724 > UNITS_PER_WORD)
38a448ca
RH
5725 ? (((GET_MODE_SIZE (inner_mode)
5726 - GET_MODE_SIZE (tmode))
5727 / UNITS_PER_WORD)
5728 - pos / BITS_PER_WORD)
5729 : pos / BITS_PER_WORD));
c0d3ac4d
RK
5730 else
5731 new = inner;
5732 }
230d793d 5733 else
6139ff20
RK
5734 new = force_to_mode (inner, tmode,
5735 len >= HOST_BITS_PER_WIDE_INT
5736 ? GET_MODE_MASK (tmode)
5737 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 5738 NULL_RTX, 0);
230d793d
RS
5739
5740 /* If this extraction is going into the destination of a SET,
5741 make a STRICT_LOW_PART unless we made a MEM. */
5742
5743 if (in_dest)
5744 return (GET_CODE (new) == MEM ? new
77fa0940 5745 : (GET_CODE (new) != SUBREG
38a448ca 5746 ? gen_rtx_CLOBBER (tmode, const0_rtx)
77fa0940 5747 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
230d793d
RS
5748
5749 /* Otherwise, sign- or zero-extend unless we already are in the
5750 proper mode. */
5751
5752 return (mode == tmode ? new
5753 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5754 mode, new));
5755 }
5756
cc471082
RS
5757 /* Unless this is a COMPARE or we have a funny memory reference,
5758 don't do anything with zero-extending field extracts starting at
5759 the low-order bit since they are simple AND operations. */
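 /* Example: outside a COMPARE, (zero_extract:SI X (const_int 8)
    (const_int 0)) is just (and:SI X (const_int 255)), so return 0 and
    let the AND machinery handle it. */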
8999a12e
RK
5760 if (pos_rtx == 0 && pos == 0 && ! in_dest
5761 && ! in_compare && ! spans_byte && unsignedp)
230d793d
RS
5762 return 0;
5763
c5c76735
JL
5764 /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
5765 we would be spanning bytes or if the position is not a constant and the
5766 length is not 1. In all other cases, we would only be going outside
5767 our object in cases when an original shift would have been
e7373556 5768 undefined. */
c5c76735 5769 if (! spans_byte && GET_CODE (inner) == MEM
e7373556
RK
5770 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5771 || (pos_rtx != 0 && len != 1)))
5772 return 0;
5773
d7cd794f 5774 /* Get the mode to use should INNER not be a MEM, the mode for the position,
230d793d
RS
5775 and the mode for the result. */
5776#ifdef HAVE_insv
5777 if (in_dest)
5778 {
0d8e55d8 5779 wanted_inner_reg_mode
a995e389
RH
5780 = insn_data[(int) CODE_FOR_insv].operand[0].mode;
5781 if (wanted_inner_reg_mode == VOIDmode)
5782 wanted_inner_reg_mode = word_mode;
5783
5784 pos_mode = insn_data[(int) CODE_FOR_insv].operand[2].mode;
5785 if (pos_mode == VOIDmode)
5786 pos_mode = word_mode;
5787
5788 extraction_mode = insn_data[(int) CODE_FOR_insv].operand[3].mode;
5789 if (extraction_mode == VOIDmode)
5790 extraction_mode = word_mode;
230d793d
RS
5791 }
5792#endif
5793
5794#ifdef HAVE_extzv
5795 if (! in_dest && unsignedp)
5796 {
0d8e55d8 5797 wanted_inner_reg_mode
a995e389
RH
5798 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
5799 if (wanted_inner_reg_mode == VOIDmode)
5800 wanted_inner_reg_mode = word_mode;
5801
5802 pos_mode = insn_data[(int) CODE_FOR_extzv].operand[3].mode;
5803 if (pos_mode == VOIDmode)
5804 pos_mode = word_mode;
5805
5806 extraction_mode = insn_data[(int) CODE_FOR_extzv].operand[0].mode;
5807 if (extraction_mode == VOIDmode)
5808 extraction_mode = word_mode;
230d793d
RS
5809 }
5810#endif
5811
5812#ifdef HAVE_extv
5813 if (! in_dest && ! unsignedp)
5814 {
0d8e55d8 5815 wanted_inner_reg_mode
a995e389
RH
5816 = insn_data[(int) CODE_FOR_extv].operand[1].mode;
5817 if (wanted_inner_reg_mode == VOIDmode)
5818 wanted_inner_reg_mode = word_mode;
5819
5820 pos_mode = insn_data[(int) CODE_FOR_extv].operand[3].mode;
5821 if (pos_mode == VOIDmode)
5822 pos_mode = word_mode;
5823
5824 extraction_mode = insn_data[(int) CODE_FOR_extv].operand[0].mode;
5825 if (extraction_mode == VOIDmode)
5826 extraction_mode = word_mode;
230d793d
RS
5827 }
5828#endif
5829
5830 /* Never narrow an object, since that might not be safe. */
5831
5832 if (mode != VOIDmode
5833 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5834 extraction_mode = mode;
5835
5836 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5837 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5838 pos_mode = GET_MODE (pos_rtx);
5839
d7cd794f
RK
5840 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
5841 if we have to change the mode of memory and cannot, the desired mode is
5842 EXTRACTION_MODE. */
5843 if (GET_CODE (inner) != MEM)
5844 wanted_inner_mode = wanted_inner_reg_mode;
5845 else if (inner_mode != wanted_inner_mode
5846 && (mode_dependent_address_p (XEXP (inner, 0))
5847 || MEM_VOLATILE_P (inner)))
5848 wanted_inner_mode = extraction_mode;
230d793d 5849
6139ff20
RK
5850 orig_pos = pos;
5851
f76b9db2
ILT
5852 if (BITS_BIG_ENDIAN)
5853 {
cf54c2cd
DE
5854 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
5855 BITS_BIG_ENDIAN style. If position is constant, compute new
5856 position. Otherwise, build subtraction.
5857 Note that POS is relative to the mode of the original argument.
5858 If it's a MEM we need to recompute POS relative to that.
5859 However, if we're extracting from (or inserting into) a register,
5860 we want to recompute POS relative to wanted_inner_mode. */
5861 int width = (GET_CODE (inner) == MEM
5862 ? GET_MODE_BITSIZE (is_mode)
5863 : GET_MODE_BITSIZE (wanted_inner_mode));
5864
f76b9db2 5865 if (pos_rtx == 0)
cf54c2cd 5866 pos = width - len - pos;
f76b9db2
ILT
5867 else
5868 pos_rtx
5869 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
cf54c2cd
DE
5870 GEN_INT (width - len), pos_rtx);
5871 /* POS may be less than 0 now, but we check for that below.
5872 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
f76b9db2 5873 }
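
  /* Illustrative example (hypothetical 32-bit target): with len == 8 and
     pos == 4 relative to a 32-bit object, the conversion above yields
     pos == 32 - 8 - 4 == 20, i.e. the same field numbered from the most
     significant bit.  */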

  /* If INNER has a wider mode, make it smaller.  If this is a constant
     extract, try to adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && ((GET_CODE (inner) == MEM
           && (inner_mode == wanted_inner_mode
               || (! mode_dependent_address_p (XEXP (inner, 0))
                   && ! MEM_VOLATILE_P (inner))))))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
         endian in both bits and bytes or little endian in bits and bytes.
         If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
         adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
          && ! spans_byte
          && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
        offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* If this is a constant position, we can move to the desired byte.  */
      if (pos_rtx == 0)
        {
          offset += pos / BITS_PER_UNIT;
          pos %= GET_MODE_BITSIZE (wanted_inner_mode);
        }

      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
          && ! spans_byte
          && is_mode != wanted_inner_mode)
        offset = (GET_MODE_SIZE (is_mode)
                  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      if (offset != 0 || inner_mode != wanted_inner_mode)
        {
          rtx newmem = gen_rtx_MEM (wanted_inner_mode,
                                    plus_constant (XEXP (inner, 0), offset));
          RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
          MEM_COPY_ATTRIBUTES (newmem, inner);
          inner = newmem;
        }
    }

  /* If INNER is not memory, we can always get it into the proper mode.  If we
     are changing its mode, POS must be a constant and smaller than the size
     of the new mode.  */
  else if (GET_CODE (inner) != MEM)
    {
      if (GET_MODE (inner) != wanted_inner_mode
          && (pos_rtx != 0
              || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
        return 0;

      inner = force_to_mode (inner, wanted_inner_mode,
                             pos_rtx
                             || len + orig_pos >= HOST_BITS_PER_WIDE_INT
                             ? GET_MODE_MASK (wanted_inner_mode)
                             : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
                             NULL_RTX, 0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
  else if (pos_rtx != 0
           && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
                         extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart_for_combine (mode, new);

  return new;
}
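
/* Illustrative use of make_extraction, assuming a 32-bit target with a
   zero_extract pattern: a request for an unsigned LEN == 8 field at
   constant POS == 8 of a REG typically yields
   (zero_extract:SI (reg) (const_int 8) (const_int 8)) or an equivalent
   shift-and-subreg form; the same request with UNSIGNEDP == 0 yields the
   SIGN_EXTRACT analogue.  */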
\f
/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
   with any other operations in X.  Return X without that shift if so.  */

static rtx
extract_left_shift (x, count)
     rtx x;
     int count;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
         either the value being shifted if the shift count is equal to
         COUNT or a shift for the difference.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= count)
        return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return gen_unary (code, mode, mode, tem);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
         make a new operation.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
          && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return gen_binary (code, mode, tem,
                           GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;

    default:
      break;
    }

  return 0;
}
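
/* A sketch of what extract_left_shift does, assuming COUNT == 3: given
   (plus (ashift X (const_int 3)) (const_int 8)) it returns
   (plus X (const_int 1)), since ((X << 3) + 8) == ((X + 1) << 3), so the
   caller can reapply the shift outside the PLUS.  */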
\f
/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the Vax that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */

static rtx
make_compound_operation (x, in_code)
     rtx x;
     enum rtx_code in_code;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i;
  rtx new = 0;
  rtx tem;
  const char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
               : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
                  && XEXP (x, 1) == const0_rtx) ? COMPARE
               : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     non-zero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
         an address.  */
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          new = make_compound_operation (XEXP (x, 0), next_code);
          new = gen_rtx_combine (MULT, mode, new,
                                 GEN_INT ((HOST_WIDE_INT) 1
                                          << INTVAL (XEXP (x, 1))));
        }
      break;
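
      /* For instance, inside an address the case above rewrites a scaled
         index such as (ashift (reg) (const_int 2)) into
         (mult (reg) (const_int 4)), the canonical form for address
         arithmetic.  */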

    case AND:
      /* If the second operand is not a constant, we can't do anything
         with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        break;

      /* If the constant is a power of two minus one and the first operand
         is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
                                 0, in_code == COMPARE);
        }

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
               && subreg_lowpart_p (XEXP (x, 0))
               && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
                                         next_code);
          new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
                                 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
                                 0, in_code == COMPARE);
        }
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
                || GET_CODE (XEXP (x, 0)) == IOR)
               && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          /* Apply the distributive law, and then try to make extractions.  */
          new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
                                 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
                                              XEXP (x, 1)),
                                 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
                                              XEXP (x, 1)));
          new = make_compound_operation (new, in_code);
        }

      /* If we have (and (rotate X C) M) and C is larger than the number
         of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
               && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
        {
          new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new = make_extraction (mode, new,
                                 (GET_MODE_BITSIZE (mode)
                                  - INTVAL (XEXP (XEXP (x, 0), 1))),
                                 NULL_RTX, i, 1, 0, in_code == COMPARE);
        }

      /* On machines without logical shifts, if the operand of the AND is
         a logical shift and our mask turns off all the propagated sign
         bits, we can replace the logical shift with an arithmetic shift.  */
      else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
               && (lshr_optab->handlers[(int) mode].insn_code
                   == CODE_FOR_nothing)
               && GET_CODE (XEXP (x, 0)) == LSHIFTRT
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
               && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
               && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
               && mode_width <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

          mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
          if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
            SUBST (XEXP (x, 0),
                   gen_rtx_combine (ASHIFTRT, mode,
                                    make_compound_operation (XEXP (XEXP (x, 0), 0),
                                                             next_code),
                                    XEXP (XEXP (x, 0), 1)));
        }

      /* If the constant is one less than a power of two, this might be
         representable by an extraction even if no shift is present.
         If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
         we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        new = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
         convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
               && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
        new = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               i, NULL_RTX, 1, 1, 0, 1);

      break;
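
      /* Example of the extractions above, assuming SImode: the constant in
         (and (lshiftrt X (const_int 8)) (const_int 255)) is 2**8 - 1, so
         the AND becomes (zero_extract:SI X (const_int 8) (const_int 8))
         (or an equivalent shift-and-subreg form).  */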

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
         arithmetic shift.  */
      if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
          && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
        {
          new = gen_rtx_combine (ASHIFTRT, mode,
                                 make_compound_operation (XEXP (x, 0),
                                                          next_code),
                                 XEXP (x, 1));
          break;
        }

      /* ... fall through ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
         this is a SIGN_EXTRACT.  */
      if (GET_CODE (rhs) == CONST_INT
          && GET_CODE (lhs) == ASHIFT
          && GET_CODE (XEXP (lhs, 1)) == CONST_INT
          && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
        {
          new = make_compound_operation (XEXP (lhs, 0), next_code);
          new = make_extraction (mode, new,
                                 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
                                 NULL_RTX, mode_width - INTVAL (rhs),
                                 code == LSHIFTRT, 0, in_code == COMPARE);
        }

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
         If so, try to merge the shifts into a SIGN_EXTEND.  We could
         also do this for some cases of SIGN_EXTRACT, but it doesn't
         seem worth the effort; the case checked for occurs on Alpha.  */

      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
          && ! (GET_CODE (lhs) == SUBREG
                && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
          && GET_CODE (rhs) == CONST_INT
          && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
          && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
        new = make_extraction (mode, make_compound_operation (new, next_code),
                               0, NULL_RTX, mode_width - INTVAL (rhs),
                               code == LSHIFTRT, 0, in_code == COMPARE);

      break;
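
      /* Example for the SIGN_EXTRACT case above, assuming SImode: in
         (ashiftrt (ashift X (const_int 24)) (const_int 24)) we have
         C2 == C1 == 24, so we form
         (sign_extract:SI X (const_int 8) (const_int 0)), i.e. a sign
         extension of the low byte of X.  */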

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
         narrowing the object and it has a different RTL code from
         what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);
      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
          && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
          && subreg_lowpart_p (x))
        {
          rtx newer = force_to_mode (tem, mode,
                                     GET_MODE_MASK (mode), NULL_RTX, 0);

          /* If we have something other than a SUBREG, we might have
             done an expansion, so rerun ourselves.  */
          if (GET_CODE (newer) != SUBREG)
            newer = make_compound_operation (newer, in_code);

          return newer;
        }

      /* If this is a paradoxical subreg, and the new code is a sign or
         zero extension, omit the subreg and widen the extension.  If it
         is a regular subreg, we can still get rid of the subreg by not
         widening so much, or in fact removing the extension entirely.  */
      if ((GET_CODE (tem) == SIGN_EXTEND
           || GET_CODE (tem) == ZERO_EXTEND)
          && subreg_lowpart_p (x))
        {
          if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
              || (GET_MODE_SIZE (mode) >
                  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
            tem = gen_rtx_combine (GET_CODE (tem), mode, XEXP (tem, 0));
          else
            tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
          return tem;
        }
      break;

    default:
      break;
    }

  if (new)
    {
      x = gen_lowpart_for_combine (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
        new = make_compound_operation (XEXP (x, i), next_code);
        SUBST (XEXP (x, i), new);
      }

  return x;
}
\f
/* Given M see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */

static int
get_pos_from_mask (m, plen)
     unsigned HOST_WIDE_INT m;
     int *plen;
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & - m);

  if (pos < 0)
    return -1;

  /* Now shift off the low-order zero bits and see if we have a power of
     two minus 1.  */
  *plen = exact_log2 ((m >> pos) + 1);

  if (*plen <= 0)
    return -1;

  return pos;
}
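
/* Worked example: for M == 0x78 (binary 0111 1000) the lowest set bit is
   bit 3 and (M >> 3) + 1 == 0x10 is a power of two, so we return 3 with
   *PLEN == 4.  For M == 0x50 the one bits are not contiguous and we
   return -1.  */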
\f
/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   Also, if REG is non-zero and X is a register equal in value to REG,
   replace X with REG.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */

static rtx
force_to_mode (x, mode, mask, reg, just_select)
     rtx x;
     enum machine_mode mode;
     unsigned HOST_WIDE_INT mask;
     rtx reg;
     int just_select;
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart_for_combine.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
              && code_to_optab[(int) code] != 0
              && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
                  != CODE_FOR_nothing))
             ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
  if (op_mode)
    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
                   ? GET_MODE_MASK (op_mode)
                   : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
  else
    fuller_mask = ~ (HOST_WIDE_INT) 0;

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);
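
  /* Example of FULLER_MASK, assuming OP_MODE narrower than HOST_WIDE_INT:
     for MASK == 0x14 the highest-order needed bit is bit 4, so FULLER_MASK
     is (1 << 5) - 1 == 0x1f; a carry out of any of bits 0-4 can reach the
     bits selected by MASK.  */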

  /* If none of the bits in X are needed, return a zero.  */
  if (! just_select && (nonzero & mask) == 0)
    return const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (GET_CODE (x) == CONST_INT)
    {
      HOST_WIDE_INT cval = INTVAL (x) & mask;
      int width = GET_MODE_BITSIZE (mode);

      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
         number, sign extend it.  */
      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
          && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
        cval |= (HOST_WIDE_INT) -1 << width;

      return GEN_INT (cval);
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
    return gen_lowpart_for_combine (mode, x);

  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
     MASK are already known to be zero in X, we need not do anything.  */
  if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
    return x;

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
         generating something that won't match.  */
      return x;

    case USE:
      /* X is a (use (mem ..)) that was made from a bit-field extraction that
         spanned the boundary of the MEM.  If we are now masking so it is
         within that boundary, we don't need the USE any more.  */
      if (! BITS_BIG_ENDIAN
          && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
        return force_to_mode (x, mode, mask, reg, next_select);
      break;

    case REG:
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
                       || rtx_equal_p (reg, get_last_value (x))))
        x = reg;
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
          /* We can ignore the effect of this SUBREG if it narrows the mode or
             if the constant masks to zero all the bits the mode doesn't
             have.  */
          && ((GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
              || (0 == (mask
                        & GET_MODE_MASK (GET_MODE (x))
                        & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
        return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
         whose constant is the AND of that constant with MASK.  If it
         remains an AND of MASK, delete it since it is redundant.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
                                      mask & INTVAL (XEXP (x, 1)));

          /* If X is still an AND, see if it is an AND with a mask that
             is just some low-order bits.  If so, and it is MASK, we don't
             need it.  */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask)
            x = XEXP (x, 0);

          /* If it remains an AND, try making another AND with the bits
             in the mode mask that aren't in MASK turned on.  If the
             constant in the AND is wide enough, this might make a
             cheaper constant.  */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_MODE_MASK (GET_MODE (x)) != mask
              && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
                                    | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
              int width = GET_MODE_BITSIZE (GET_MODE (x));
              rtx y;

              /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
                 number, sign extend it.  */
              if (width > 0 && width < HOST_BITS_PER_WIDE_INT
                  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
                cval |= (HOST_WIDE_INT) -1 << width;

              y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
              if (rtx_cost (y, SET) < rtx_cost (x, SET))
                x = y;
            }

          break;
        }

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
         low-order bits (as in an alignment operation) and FOO is already
         aligned to that boundary, mask C1 to that boundary as well.
         This may eliminate that PLUS and, later, the AND.  */

      {
        int width = GET_MODE_BITSIZE (mode);
        unsigned HOST_WIDE_INT smask = mask;

        /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
           number, sign extend it.  */

        if (width < HOST_BITS_PER_WIDE_INT
            && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
          smask |= (HOST_WIDE_INT) -1 << width;

        if (GET_CODE (XEXP (x, 1)) == CONST_INT
            && exact_log2 (- smask) >= 0)
          {
#ifdef STACK_BIAS
            if (STACK_BIAS
                && (XEXP (x, 0) == stack_pointer_rtx
                    || XEXP (x, 0) == frame_pointer_rtx))
              {
                int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
                unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);

                sp_mask &= ~ (sp_alignment - 1);
                if ((sp_mask & ~ smask) == 0
                    && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~ smask) != 0)
                  return force_to_mode (plus_constant (XEXP (x, 0),
                                                       ((INTVAL (XEXP (x, 1)) -
                                                         STACK_BIAS) & smask)
                                                       + STACK_BIAS),
                                        mode, smask, reg, next_select);
              }
#endif
            if ((nonzero_bits (XEXP (x, 0), mode) & ~ smask) == 0
                && (INTVAL (XEXP (x, 1)) & ~ smask) != 0)
              return force_to_mode (plus_constant (XEXP (x, 0),
                                                   (INTVAL (XEXP (x, 1))
                                                    & smask)),
                                    mode, smask, reg, next_select);
          }
      }
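
      /* A sketch of the alignment case above: if MASK is -4 (i.e. ~3),
         XEXP (x, 0) is known to be 4-byte aligned and C1 is 7, then C1 is
         masked down to 4, so the PLUS seen under this mask becomes
         (plus FOO (const_int 4)) and the enclosing AND may later be
         deleted.  */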

      /* ... fall through ...  */

    case MINUS:
    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
         most significant bit in MASK since carries from those bits will
         affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
         LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
         operation which may be a bitfield extraction.  Ensure that the
         constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && ((INTVAL (XEXP (XEXP (x, 0), 1))
               + floor_log2 (INTVAL (XEXP (x, 1))))
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && (INTVAL (XEXP (x, 1))
              & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
        {
          temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
                          << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = gen_binary (GET_CODE (x), GET_MODE (x),
                             XEXP (XEXP (x, 0), 0), temp);
          x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
                          XEXP (XEXP (x, 0), 1));
          return force_to_mode (x, mode, mask, reg, next_select);
        }

    binop:
      /* For most binary operations, just propagate into the operation and
         change the mode if we have an operation of that mode.  */

      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    reg, next_select));
      op1 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 1), mode, mask,
                                                    reg, next_select));

      /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
         MASK since OP1 might have been sign-extended but we never want
         to turn on extra bits, since combine might have previously relied
         on them being off.  */
      if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
          && (INTVAL (op1) & mask) != 0)
        op1 = GEN_INT (INTVAL (op1) & mask);

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
         However, we cannot do anything with shifts where we cannot
         guarantee that the counts are smaller than the size of the mode
         because such a count will have a different meaning in a
         wider mode.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
             && INTVAL (XEXP (x, 1)) >= 0
             && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
          && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
                && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
                    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
        break;

      /* If the shift count is a constant and we can do arithmetic in
         the mode of the shift, refine which bits we need.  Otherwise, use the
         conservative form of the mask.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        mask >>= INTVAL (XEXP (x, 1));
      else
        mask = fuller_mask;

      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), op_mode,
                                                    mask, reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
         this shift constant is valid for the host, and we can do arithmetic
         in OP_MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        {
          rtx inner = XEXP (x, 0);

          /* Select the mask of the bits we need for the shift operand.  */
          mask <<= INTVAL (XEXP (x, 1));

          /* We can only change the mode of the shift if we can do arithmetic
             in the mode of the shift and MASK is no wider than the width of
             OP_MODE.  */
          if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
              || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
            op_mode = GET_MODE (x);

          inner = force_to_mode (inner, op_mode, mask, reg, next_select);

          if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
            x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
        }

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
         shift and AND produces only copies of the sign bit (C2 is one less
         than a power of two), we can do this with just a shift.  */

      if (GET_CODE (x) == LSHIFTRT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && ((INTVAL (XEXP (x, 1))
               + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
              >= GET_MODE_BITSIZE (GET_MODE (x)))
          && exact_log2 (mask + 1) >= 0
          && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
              >= exact_log2 (mask + 1)))
        x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                        GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
                                 - exact_log2 (mask + 1)));

      goto shiftrt;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
         all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (mask == ((unsigned HOST_WIDE_INT) 1
                       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      /* If this is a shift by a constant, get a mask that contains those bits
         that are not copies of the sign bit.  We then have two cases:  If
         MASK only includes those bits, this can be a logical shift, which may
         allow simplifications.  If MASK is a single-bit field not within
         those bits, we are requesting a copy of the sign bit and hence can
         shift the sign bit to the appropriate location.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          int i = -1;

          /* If the considered data is wider than HOST_WIDE_INT, we can't
             represent a mask for all its bits in a single scalar.
             But we only care about the lower bits, so calculate these.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
            {
              nonzero = ~ (HOST_WIDE_INT) 0;

              /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                 is the number of bits a full-width mask would have set.
                 We need only shift if these are fewer than nonzero can
                 hold.  If not, we must keep all bits set in nonzero.  */

              if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                  < HOST_BITS_PER_WIDE_INT)
                nonzero >>= INTVAL (XEXP (x, 1))
                            + HOST_BITS_PER_WIDE_INT
                            - GET_MODE_BITSIZE (GET_MODE (x));
            }
          else
            {
              nonzero = GET_MODE_MASK (GET_MODE (x));
              nonzero >>= INTVAL (XEXP (x, 1));
            }

          if ((mask & ~ nonzero) == 0
              || (i = exact_log2 (mask)) >= 0)
            {
              x = simplify_shift_const
                (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                 i < 0 ? INTVAL (XEXP (x, 1))
                 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

              if (GET_CODE (x) != ASHIFTRT)
                return force_to_mode (x, mode, mask, reg, next_select);
            }
        }

      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
         even if the shift count isn't a constant.  */
      if (mask == 1)
        x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));

    shiftrt:

      /* If this is a zero- or sign-extension operation that just affects bits
         we don't care about, remove it.  Be sure the call above returned
         something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && (INTVAL (XEXP (x, 1))
              <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
          && GET_CODE (XEXP (x, 0)) == ASHIFT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
        return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
                              reg, next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
         in the mode of X, compute where the bits we care about are.
         Otherwise, we can't do anything.  Don't change the mode of
         the shift or propagate MODE into the shift, though.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
                                            GET_MODE (x), GEN_INT (mask),
                                            XEXP (x, 1));
          if (temp && GET_CODE (temp) == CONST_INT)
            SUBST (XEXP (x, 0),
                   force_to_mode (XEXP (x, 0), GET_MODE (x),
                                  INTVAL (temp), reg, next_select));
        }
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
         won't change the low-order bit.  */
      if (mask == 1)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);

      /* We need any bits less significant than the most significant bit in
         MASK since carries from those bits will affect the bits we are
         interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
         same as the XOR case above.  Ensure that the constant we form is not
         wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
        {
          temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
          x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));

          return force_to_mode (x, mode, mask, reg, next_select);
        }

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
         use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    reg, next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = gen_unary (code, op_mode, op_mode, op0);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
         in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
         which is equal to STORE_FLAG_VALUE.  */
      if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
          && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
          && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
         written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
             gen_lowpart_for_combine (GET_MODE (x),
                                      force_to_mode (XEXP (x, 1), mode,
                                                     mask, reg, next_select)));
      SUBST (XEXP (x, 2),
             gen_lowpart_for_combine (GET_MODE (x),
                                      force_to_mode (XEXP (x, 2), mode,
                                                     mask, reg, next_select)));
      break;

    default:
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_for_combine (mode, x);
}
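
/* Illustrative call, assuming an SImode X: applied to (and X (const_int 255))
   with MASK == 0x0f, the AND case above first reduces the constant to
   0xff & 0x0f == 0x0f, and since the result is then exactly an AND with
   MASK, the AND is deleted as redundant and X itself is returned.  */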
\f
/* Return nonzero if X is an expression that has one of two values depending on
   whether some other value is zero or nonzero.  In that case, we return the
   value that is being tested, *PTRUE is set to the value if the rtx being
   returned has a nonzero value, and *PFALSE is set to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X.  */

static rtx
if_then_else_cond (x, ptrue, pfalse)
     rtx x;
     rtx *ptrue, *pfalse;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  int size = GET_MODE_BITSIZE (mode);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;

  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values.  */
  if (GET_RTX_CLASS (code) == '1'
      && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
      *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
      return cond0;
    }

  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations.  */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values.  */
  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
           || GET_RTX_CLASS (code) == '<')
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
          && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
        {
          /* If if_then_else_cond returned zero, then true/false are the
             same rtl.  We must copy one of them to prevent invalid rtl
             sharing.  */
          if (cond0 == 0)
            true0 = copy_rtx (true0);
          else if (cond1 == 0)
            true1 = copy_rtx (true1);

          *ptrue = gen_binary (code, mode, true0, true1);
          *pfalse = gen_binary (code, mode, false0, false1);
          return cond0 ? cond0 : cond1;
        }

      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
         operands is zero when the other is non-zero, and vice-versa,
         and STORE_FLAG_VALUE is 1 or -1.  */

      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == PLUS || code == IOR || code == XOR || code == MINUS
              || code == UMAX)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          rtx op0 = XEXP (XEXP (x, 0), 1);
          rtx op1 = XEXP (XEXP (x, 1), 1);

          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
              && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
              && reversible_comparison_p (cond1)
              && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reverse_condition (GET_CODE (cond1)))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
              *pfalse = gen_binary (MULT, mode,
                                    (code == MINUS
                                     ? gen_unary (NEG, mode, mode, op1) : op1),
                                    const_true_rtx);
              return cond0;
            }
        }

      /* Similarly for MULT, AND and UMIN, except that for these the result
         is always zero.  */
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == MULT || code == AND || code == UMIN)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
              && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
              && reversible_comparison_p (cond1)
              && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reverse_condition (GET_CODE (cond1)))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = *pfalse = const0_rtx;
              return cond0;
            }
        }
    }

  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
         canonicalize it if it is NE or EQ.  */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
        return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
        {
          *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
          return XEXP (cond0, 0);
        }
      else
        return cond0;
    }

  /* If X is a normal SUBREG with both inner and outer modes integral,
     we can narrow both the true and false values of the inner expression,
     if there is a condition.  */
  else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
           && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
           && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
           && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
                                               &true0, &false0)))
    {
      *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
      *pfalse
        = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);

      return cond0;
    }

  /* If X is a constant, this isn't special and will cause confusions
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
  else if (CONSTANT_P (x)
           || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
    ;

  /* If X is known to be either 0 or -1, those are the true and
     false values when testing X.  */
  else if (num_sign_bit_copies (x, mode) == size)
    {
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
      return x;
    }

  /* Likewise for 0 or a single bit.  */
  else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
    {
      *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
      return x;
    }

  /* Otherwise fail; show no condition with true and false values the same.  */
  *ptrue = *pfalse = x;
  return 0;
}
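
/* Illustrative call: for (if_then_else (ne (reg C) (const_int 0))
   (const_int 5) (const_int 10)) we return (reg C) with *PTRUE set to
   (const_int 5) and *PFALSE set to (const_int 10).  Likewise, an X known
   to be either 0 or -1 returns X itself with *PTRUE == constm1_rtx and
   *PFALSE == const0_rtx.  */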
\f
/* Return the value of expression X given the fact that condition COND
   is known to be true when applied to REG as its first operand and VAL
   as its second.  X is known to not be shared and so can be modified in
   place.

   We only handle the simplest cases, and specifically those cases that
   arise with IF_THEN_ELSE expressions.  */

static rtx
known_cond (x, cond, reg, val)
     rtx x;
     enum rtx_code cond;
     rtx reg, val;
{
  enum rtx_code code = GET_CODE (x);
  rtx temp;
  const char *fmt;
  int i, j;

  if (side_effects_p (x))
    return x;

  if (cond == EQ && rtx_equal_p (x, reg))
    return val;

  /* If X is (abs REG) and we know something about REG's relationship
     with zero, we may be able to simplify this.  */

  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
    switch (cond)
      {
      case GE:  case GT:  case EQ:
        return XEXP (x, 0);
      case LT:  case LE:
        return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
                          XEXP (x, 0));
      default:
        break;
      }

  /* The only other cases we handle are MIN, MAX, and comparisons if the
     operands are the same as REG and VAL.  */

  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      if (rtx_equal_p (XEXP (x, 0), val))
        cond = swap_condition (cond), temp = val, val = reg, reg = temp;

      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
        {
          if (GET_RTX_CLASS (code) == '<')
            return (comparison_dominates_p (cond, code) ? const_true_rtx
                    : (comparison_dominates_p (cond,
                                               reverse_condition (code))
                       ? const0_rtx : x));

          else if (code == SMAX || code == SMIN
                   || code == UMIN || code == UMAX)
            {
              int unsignedp = (code == UMIN || code == UMAX);

              if (code == SMAX || code == UMAX)
                cond = reverse_condition (cond);

              switch (cond)
                {
                case GE:   case GT:
                  return unsignedp ? x : XEXP (x, 1);
                case LE:   case LT:
                  return unsignedp ? x : XEXP (x, 0);
                case GEU:  case GTU:
                  return unsignedp ? XEXP (x, 1) : x;
                case LEU:  case LTU:
                  return unsignedp ? XEXP (x, 0) : x;
                default:
                  break;
                }
            }
        }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
                                                cond, reg, val));
    }

  return x;
}
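
/* Illustrative call: if COND is GE, i.e. REG >= VAL is known to hold, then
   known_cond applied to (smin REG VAL) returns VAL, and applied to
   (ge REG VAL) it returns const_true_rtx via comparison_dominates_p.  */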
\f
/* See if X and Y are equal for the purposes of seeing if we can rewrite an
   assignment as a field assignment.  */

static int
rtx_equal_for_field_assignment_p (x, y)
     rtx x;
     rtx y;
{
  if (x == y || rtx_equal_p (x, y))
    return 1;

  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
     Note that all SUBREGs of MEM are paradoxical; otherwise they
     would have been rewritten.  */
  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
      && GET_CODE (SUBREG_REG (y)) == MEM
      && rtx_equal_p (SUBREG_REG (y),
                      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
    return 1;

  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
      && GET_CODE (SUBREG_REG (x)) == MEM
      && rtx_equal_p (SUBREG_REG (x),
                      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
    return 1;

  /* We used to see if get_last_value of X and Y were the same but that's
     not correct.  In one direction, we'll cause the assignment to have
     the wrong destination, and in the other case, we'll import a register
     into this insn that might already have been dead.  So fail if none of
     the above cases are true.  */
  return 0;
}
\f
/* See if X, a SET operation, can be rewritten as a bit-field assignment.
   Return that assignment if so.

   We only handle the most common cases.  */

static rtx
make_field_assignment (x)
     rtx x;
{
  rtx dest = SET_DEST (x);
  rtx src = SET_SRC (x);
  rtx assign;
  rtx rhs, lhs;
  HOST_WIDE_INT c1;
  int pos, len;
  rtx other;
  enum machine_mode mode;

  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
     a clear of a one-bit field.  We will have changed it to
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
     for a SUBREG.  */

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }

  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
           && subreg_lowpart_p (XEXP (src, 0))
           && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
           && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
           && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
           && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0,
                                XEXP (SUBREG_REG (XEXP (src, 0)), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }

  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
     one-bit field.  */
  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
           && XEXP (XEXP (src, 0), 0) == const1_rtx
           && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const1_rtx);
      return x;
    }

  /* The other case we handle is assignments into a constant-position
     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
     a mask that has all one bits except for a group of zero bits and
     OTHER is known to have zeros where C1 has ones, this is such an
     assignment.  Compute the position and length from C1.  Shift OTHER
     to the appropriate position, force it to the required mode, and
     make the extraction.  Check for the AND in both operands.  */

  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
    return x;

  rhs = expand_compound_operation (XEXP (src, 0));
  lhs = expand_compound_operation (XEXP (src, 1));

  if (GET_CODE (rhs) == AND
      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
  else if (GET_CODE (lhs) == AND
           && GET_CODE (XEXP (lhs, 1)) == CONST_INT
           && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
  else
    return x;

  pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
    return x;

  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
  if (assign == 0)
    return x;

  /* The mode to use for the source is the mode of the assignment, or of
     what is inside a possible STRICT_LOW_PART.  */
  mode = (GET_CODE (assign) == STRICT_LOW_PART
          ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));

  /* Shift OTHER right POS places and make it the source, restricting it
     to the proper length and mode.  */

  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
                                             GET_MODE (src), other, pos),
                       mode,
                       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
                       ? GET_MODE_MASK (mode)
                       : ((HOST_WIDE_INT) 1 << len) - 1,
                       dest, 0);

  return gen_rtx_combine (SET, VOIDmode, assign, src);
}
7305\f
/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
   if so.  */
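
/* For instance (an illustrative case, not tied to any target):

       (plus (mult (reg 64) (reg 66)) (mult (reg 65) (reg 66)))

   becomes

       (mult (plus (reg 64) (reg 65)) (reg 66))

   saving one multiplication.  The same reassociation is tried for the
   logical operators, where the inner operation may instead be a shift
   or a SUBREG.  */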

static rtx
apply_distributive_law (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  rtx lhs, rhs, other;
  rtx tem;
  enum rtx_code inner_code;

  /* Distributivity is not true for floating point.
     It can change the value.  So don't do it.
     -- rms and moshier@world.std.com.  */
  if (FLOAT_MODE_P (GET_MODE (x)))
    return x;

  /* The outer operation can only be one of the following:  */
  if (code != IOR && code != AND && code != XOR
      && code != PLUS && code != MINUS)
    return x;

  lhs = XEXP (x, 0), rhs = XEXP (x, 1);

  /* If either operand is a primitive we can't do anything, so get out
     fast.  */
  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
    return x;

  lhs = expand_compound_operation (lhs);
  rhs = expand_compound_operation (rhs);
  inner_code = GET_CODE (lhs);
  if (inner_code != GET_CODE (rhs))
    return x;

  /* See if the inner and outer operations distribute.  */
  switch (inner_code)
    {
    case LSHIFTRT:
    case ASHIFTRT:
    case AND:
    case IOR:
      /* These all distribute except over PLUS.  */
      if (code == PLUS || code == MINUS)
	return x;
      break;

    case MULT:
      if (code != PLUS && code != MINUS)
	return x;
      break;

    case ASHIFT:
      /* This is also a multiply, so it distributes over everything.  */
      break;

    case SUBREG:
      /* Non-paradoxical SUBREGs distribute over all operations, provided
	 the inner modes and word numbers are the same, this is an extraction
	 of a low-order part, we don't convert an fp operation to int or
	 vice versa, and we would not be converting a single-word
	 operation into a multi-word operation.  The latter test is not
	 required, but it prevents generating unneeded multi-word operations.
	 Some of the previous tests are redundant given the latter test, but
	 are retained because they are required for correctness.

	 We produce the result slightly differently in this case.  */

      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
	  || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
	  || ! subreg_lowpart_p (lhs)
	  || (GET_MODE_CLASS (GET_MODE (lhs))
	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
	  || (GET_MODE_SIZE (GET_MODE (lhs))
	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
	return x;

      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
			SUBREG_REG (lhs), SUBREG_REG (rhs));
      return gen_lowpart_for_combine (GET_MODE (x), tem);

    default:
      return x;
    }

  /* Set LHS and RHS to the inner operands (A and B in the example
     above) and set OTHER to the common operand (C in the example).
     There is only one way to do this unless the inner operation is
     commutative.  */
  if (GET_RTX_CLASS (inner_code) == 'c'
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
  else if (GET_RTX_CLASS (inner_code) == 'c'
	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
  else if (GET_RTX_CLASS (inner_code) == 'c'
	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
  else
    return x;

  /* Form the new inner operation, seeing if it simplifies first.  */
  tem = gen_binary (code, GET_MODE (x), lhs, rhs);

  /* There is one exception to the general way of distributing:
     (a | c) ^ (b | c) -> (a ^ b) & ~c  */
  if (code == XOR && inner_code == IOR)
    {
      inner_code = AND;
      other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
    }

  /* We may be able to continue distributing the result, so call
     ourselves recursively on the inner operation before forming the
     outer operation, which we return.  */
  return gen_binary (inner_code, GET_MODE (x),
		     apply_distributive_law (tem), other);
}
\f
/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.

   Return an equivalent form, if different from X.  Otherwise, return X.  If
   X is zero, we are to always construct the equivalent form.  */
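
/* For example (with hypothetical operands): if VAROP is known from
   nonzero_bits to have only its low 8 bits possibly set, then
   (and VAROP (const_int 0xff)) is redundant and VAROP itself is returned,
   while (and VAROP (const_int 0xff00)) collapses to (const_int 0)
   because CONSTOP & NONZERO == 0.  */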

static rtx
simplify_and_const_int (x, mode, varop, constop)
     rtx x;
     enum machine_mode mode;
     rtx varop;
     unsigned HOST_WIDE_INT constop;
{
  unsigned HOST_WIDE_INT nonzero;
  int i;

  /* Simplify VAROP knowing that we will only be looking at some of the
     bits in it.  */
  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);

  /* If VAROP is a CLOBBER, we will fail, so return it; if it is a
     CONST_INT, we are done.  */
  if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
    return varop;

  /* See what bits may be nonzero in VAROP.  Unlike the general case of
     a call to nonzero_bits, here we don't care about bits outside
     MODE.  */

  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
  nonzero = trunc_int_for_mode (nonzero, mode);

  /* Turn off all bits in the constant that are known to already be zero.
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
     which is tested below.  */

  constop &= nonzero;

  /* If we don't have any bits left, return zero.  */
  if (constop == 0)
    return const0_rtx;

  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
     a power of two, we can replace this with an ASHIFT.  */
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
      && (i = exact_log2 (constop)) >= 0)
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);

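  /* An illustration of the NEG rule just above (hypothetical operands):
     if X is known to be 0 or 1, (neg X) is 0 or all ones, so
     (and (neg X) (const_int 4)) is 0 or 4; that is exactly
     (ashift X (const_int 2)), which is what simplify_shift_const is
     asked to build here.  */
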
  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
     or XOR, then try to apply the distributive law.  This may eliminate
     operations if either branch can be simplified because of the AND.
     It may also make some cases more complex, but those cases probably
     won't match a pattern either with or without this.  */

  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
    return
      gen_lowpart_for_combine
	(mode,
	 apply_distributive_law
	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
					      XEXP (varop, 0), constop),
		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
					      XEXP (varop, 1), constop))));

  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
     if we already had one (just check for the simplest cases).  */
  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_MODE (XEXP (x, 0)) == mode
      && SUBREG_REG (XEXP (x, 0)) == varop)
    varop = XEXP (x, 0);
  else
    varop = gen_lowpart_for_combine (mode, varop);

  /* If we can't make the SUBREG, try to return what we were given.  */
  if (GET_CODE (varop) == CLOBBER)
    return x ? x : varop;

  /* If we are only masking insignificant bits, return VAROP.  */
  if (constop == nonzero)
    x = varop;

  /* Otherwise, return an AND.  See how much, if any, of X we can use.  */
  else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
    x = gen_binary (AND, mode, varop, GEN_INT (constop));

  else
    {
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
	SUBST (XEXP (x, 1), GEN_INT (constop));

      SUBST (XEXP (x, 0), varop);
    }

  return x;
}
\f
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
#define num_sign_bit_copies()

/* Given an expression, X, compute which bits in X can be non-zero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (MODE), but if X is
   a shift, AND, or zero_extract, we can do better.  */
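
/* Two quick examples of what this buys us (purely illustrative):
   nonzero_bits of (and X (const_int 12)) can only have bits 2 and 3 set,
   so it is at most 12; nonzero_bits of (lshiftrt:SI X (const_int 28)) is
   at most 15, since the four remaining operand bits land in the low-order
   positions.  Callers use this to delete masking operations that cannot
   change the value.  */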

static unsigned HOST_WIDE_INT
nonzero_bits (x, mode)
     rtx x;
     enum machine_mode mode;
{
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
  unsigned HOST_WIDE_INT inner_nz;
  enum rtx_code code;
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx tem;

  /* For floating-point values, assume all bits are needed.  */
  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
    return nonzero;

  /* If X is wider than MODE, use its mode instead.  */
  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
    {
      mode = GET_MODE (x);
      nonzero = GET_MODE_MASK (mode);
      mode_width = GET_MODE_BITSIZE (mode);
    }

  if (mode_width > HOST_BITS_PER_WIDE_INT)
    /* Our only callers in this case look for single bit values.  So
       just return the mode mask.  Those tests will then be false.  */
    return nonzero;

#ifndef WORD_REGISTER_OPERATIONS
  /* If MODE is wider than X, but both are a single word for both the host
     and target machines, we can compute this from which bits of the
     object might be nonzero in its own mode, taking into account the fact
     that on many CISC machines, accessing an object in a wider mode
     causes the high-order bits to become undefined.  So they are
     not known to be zero.  */

  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
    {
      nonzero &= nonzero_bits (x, GET_MODE (x));
      nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
      return nonzero;
    }
#endif

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be zero.  */
      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
	  && REGNO_POINTER_FLAG (REGNO (x)))
	nonzero &= GET_MODE_MASK (ptr_mode);
#endif

#ifdef STACK_BOUNDARY
      /* If this is the stack pointer, we may know something about its
	 alignment.  If PUSH_ROUNDING is defined, it is possible for the
	 stack to be momentarily aligned only to that amount, so we pick
	 the least alignment.  */

      /* We can't check for arg_pointer_rtx here, because it is not
	 guaranteed to have as much alignment as the stack pointer.
	 In particular, in the Irix6 n64 ABI, the stack has 128 bit
	 alignment but the argument pointer has only 64 bit alignment.  */

      if ((x == frame_pointer_rtx
	   || x == stack_pointer_rtx
	   || x == hard_frame_pointer_rtx
	   || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (x) <= LAST_VIRTUAL_REGISTER))
#ifdef STACK_BIAS
	  && !STACK_BIAS
#endif
	  )
	{
	  int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;

#ifdef PUSH_ROUNDING
	  if (REGNO (x) == STACK_POINTER_REGNUM)
	    sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
#endif

	  /* We must return here, otherwise we may get a worse result from
	     one of the choices below.  There is nothing useful below as
	     far as the stack pointer is concerned.  */
	  return nonzero &= ~ (sp_alignment - 1);
	}
#endif

      /* If X is a register whose nonzero bits value is current, use it.
	 Otherwise, if X is a register whose value we can find, use that
	 value.  Otherwise, use the previously-computed global nonzero bits
	 for this register.  */

      if (reg_last_set_value[REGNO (x)] != 0
	  && reg_last_set_mode[REGNO (x)] == mode
	  && (reg_last_set_label[REGNO (x)] == label_tick
	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
		  && REG_N_SETS (REGNO (x)) == 1
		  && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
					REGNO (x))))
	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
	return reg_last_set_nonzero_bits[REGNO (x)];

      tem = get_last_value (x);

      if (tem)
	{
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than MODE and TEM is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
	      && GET_CODE (tem) == CONST_INT
	      && INTVAL (tem) > 0
	      && 0 != (INTVAL (tem)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    tem = GEN_INT (INTVAL (tem)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif
	  return nonzero_bits (tem, mode);
	}
      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
	return reg_nonzero_bits[REGNO (x)] & nonzero;
      else
	return nonzero;

    case CONST_INT:
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is negative in MODE, sign-extend the value.  */
      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
#endif

      return INTVAL (x);

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* In many, if not most, RISC machines, reading a byte from memory
	 zeros the rest of the register.  Noticing that fact saves a lot
	 of extra zero-extends.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
	nonzero &= GET_MODE_MASK (GET_MODE (x));
#endif
      break;

    case EQ:  case NE:
    case GT:  case GTU:
    case LT:  case LTU:
    case GE:  case GEU:
    case LE:  case LEU:

      /* If this produces an integer result, we know which bits are set.
	 Code here used to clear bits outside the mode of X, but that is
	 now done above.  */

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode_width <= HOST_BITS_PER_WIDE_INT)
	nonzero = STORE_FLAG_VALUE;
      break;

    case NEG:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
	  == GET_MODE_BITSIZE (GET_MODE (x)))
	nonzero = 1;
#endif

      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
	nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
      break;

    case ABS:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
	  == GET_MODE_BITSIZE (GET_MODE (x)))
	nonzero = 1;
#endif
      break;

    case TRUNCATE:
      nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
      break;

    case ZERO_EXTEND:
      nonzero &= nonzero_bits (XEXP (x, 0), mode);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
      break;

    case SIGN_EXTEND:
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
	 Otherwise, show all the bits in the outer mode but not the inner
	 may be non-zero.  */
      inner_nz = nonzero_bits (XEXP (x, 0), mode);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	{
	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
	  if (inner_nz
	      & (((HOST_WIDE_INT) 1
		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
	    inner_nz |= (GET_MODE_MASK (mode)
			 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
	}

      nonzero &= inner_nz;
      break;

    case AND:
      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
		  & nonzero_bits (XEXP (x, 1), mode));
      break;

    case XOR:   case IOR:
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
		  | nonzero_bits (XEXP (x, 1), mode));
      break;

    case PLUS:  case MINUS:
    case MULT:
    case DIV:   case UDIV:
    case MOD:   case UMOD:
      /* We can apply the rules of arithmetic to compute the number of
	 high- and low-order zero bits of these operations.  We start by
	 computing the width (position of the highest-order non-zero bit)
	 and the number of low-order zero bits for each value.  */
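      /* For instance (illustrative numbers): if nz0 == 0xff (width 8) and
	 nz1 == 0xf0 (width 8, low four bits zero), a PLUS can carry only
	 into bit 8, so result_width is 9 and just bits 0..8 can be nonzero;
	 a MULT has result_width 16, with the low four bits known zero
	 because low0 + low1 == 4.  */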
      {
	unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
	unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
	int width0 = floor_log2 (nz0) + 1;
	int width1 = floor_log2 (nz1) + 1;
	int low0 = floor_log2 (nz0 & -nz0);
	int low1 = floor_log2 (nz1 & -nz1);
	HOST_WIDE_INT op0_maybe_minusp
	  = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
	HOST_WIDE_INT op1_maybe_minusp
	  = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
	int result_width = mode_width;
	int result_low = 0;

	switch (code)
	  {
	  case PLUS:
#ifdef STACK_BIAS
	    if (STACK_BIAS
		&& (XEXP (x, 0) == stack_pointer_rtx
		    || XEXP (x, 0) == frame_pointer_rtx)
		&& GET_CODE (XEXP (x, 1)) == CONST_INT)
	      {
		int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;

		nz0 = (GET_MODE_MASK (mode) & ~ (sp_alignment - 1));
		nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
		width0 = floor_log2 (nz0) + 1;
		width1 = floor_log2 (nz1) + 1;
		low0 = floor_log2 (nz0 & -nz0);
		low1 = floor_log2 (nz1 & -nz1);
	      }
#endif
	    result_width = MAX (width0, width1) + 1;
	    result_low = MIN (low0, low1);
	    break;
	  case MINUS:
	    result_low = MIN (low0, low1);
	    break;
	  case MULT:
	    result_width = width0 + width1;
	    result_low = low0 + low1;
	    break;
	  case DIV:
	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
	      result_width = width0;
	    break;
	  case UDIV:
	    result_width = width0;
	    break;
	  case MOD:
	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
	      result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  case UMOD:
	    result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  default:
	    abort ();
	  }

	if (result_width < mode_width)
	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;

	if (result_low > 0)
	  nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
      }
      break;

    case ZERO_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
      break;

    case SUBREG:
      /* If this is a SUBREG formed for a promoted variable that has
	 been zero-extended, we know that at least the high-order bits
	 are zero, though others might be too.  */

      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
	nonzero = (GET_MODE_MASK (GET_MODE (x))
		   & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));

      /* If the inner mode is a single word for both the host and target
	 machines, we can compute this from which bits of the inner
	 object might be nonzero.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
	      <= HOST_BITS_PER_WIDE_INT))
	{
	  nonzero &= nonzero_bits (SUBREG_REG (x), mode);

#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
	  /* If this is a typical RISC machine, we only have to worry
	     about the way loads are extended.  */
	  if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
	      ? (nonzero
		 & (1L << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))
	      : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
#endif
	    {
	      /* On many CISC machines, accessing an object in a wider mode
		 causes the high-order bits to become undefined.  So they are
		 not known to be zero.  */
	      if (GET_MODE_SIZE (GET_MODE (x))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
		nonzero |= (GET_MODE_MASK (GET_MODE (x))
			    & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
	    }
	}
      break;

    case ASHIFTRT:
    case LSHIFTRT:
    case ASHIFT:
    case ROTATE:
      /* The nonzero bits are in two classes: any bits within MODE
	 that aren't in GET_MODE (x) are always significant.  The rest of the
	 nonzero bits are those that are significant in the operand of
	 the shift when shifted the appropriate number of bits.  This
	 shows that high-order bits are cleared by the right shift and
	 low-order bits by left shifts.  */
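      /* For example (illustrative): with SImode X, (lshiftrt X 28) leaves
	 only bits 0..3 possibly nonzero, while (ashift X 28) leaves only
	 bits 28..31.  For ASHIFTRT the sign bit may be replicated into the
	 vacated positions, which is handled below by widening INNER.  */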
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  enum machine_mode inner_mode = GET_MODE (x);
	  int width = GET_MODE_BITSIZE (inner_mode);
	  int count = INTVAL (XEXP (x, 1));
	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
	  unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
	  unsigned HOST_WIDE_INT outer = 0;

	  if (mode_width > width)
	    outer = (op_nonzero & nonzero & ~ mode_mask);

	  if (code == LSHIFTRT)
	    inner >>= count;
	  else if (code == ASHIFTRT)
	    {
	      inner >>= count;

	      /* If the sign bit may have been nonzero before the shift, we
		 need to mark all the places it could have been copied to
		 by the shift as possibly nonzero.  */
	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
	    }
	  else if (code == ASHIFT)
	    inner <<= count;
	  else
	    inner = ((inner << (count % width)
		      | (inner >> (width - (count % width)))) & mode_mask);

	  nonzero &= (outer | inner);
	}
      break;

    case FFS:
      /* This is at most the number of bits in the mode.  */
      nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
      break;

    case IF_THEN_ELSE:
      nonzero &= (nonzero_bits (XEXP (x, 1), mode)
		  | nonzero_bits (XEXP (x, 2), mode));
      break;

    default:
      break;
    }

  return nonzero;
}

/* See the macro definition above.  */
#undef num_sign_bit_copies
\f
/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */
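
/* For example (illustrative): in SImode, (const_int -1) yields 32,
   (const_int 3) yields 30, and (sign_extend:SI (reg:QI 64)) yields at
   least 25, since the top 25 bits all equal the QImode sign bit.  */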

static int
num_sign_bit_copies (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);
  int bitwidth;
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;
  rtx tem;

  /* If we weren't given a mode, use the mode of X.  If the mode is still
     VOIDmode, we don't know anything.  Likewise if one of the modes is
     floating-point.  */

  if (mode == VOIDmode)
    mode = GET_MODE (x);

  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
    return 1;

  bitwidth = GET_MODE_BITSIZE (mode);

  /* For a smaller object, just ignore the high bits.  */
  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
    return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
		    - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));

  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* If this machine does not do all register operations on the entire
	 register and MODE is wider than the mode of X, we can say nothing
	 at all about the high-order bits.  */
      return 1;
#else
      /* Likewise on machines that do, if the mode of the object is smaller
	 than a word and loads of that size don't sign extend, we can say
	 nothing about the high order bits.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
#ifdef LOAD_EXTEND_OP
	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
#endif
	  )
	return 1;
#endif
    }

  switch (code)
    {
    case REG:

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
	  && REGNO_POINTER_FLAG (REGNO (x)))
	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
#endif

      if (reg_last_set_value[REGNO (x)] != 0
	  && reg_last_set_mode[REGNO (x)] == mode
	  && (reg_last_set_label[REGNO (x)] == label_tick
	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
		  && REG_N_SETS (REGNO (x)) == 1
		  && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
					REGNO (x))))
	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
	return reg_last_set_sign_bit_copies[REGNO (x)];

      tem = get_last_value (x);
      if (tem != 0)
	return num_sign_bit_copies (tem, mode);

      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
	return reg_sign_bit_copies[REGNO (x)];
      break;

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
	return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
#endif
      break;

    case CONST_INT:
      /* If the constant is negative, take its 1's complement and remask.
	 Then see how many zero bits we have.  */
      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	nonzero = (~ nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
	 and we are looking at it in a wider mode, we know that at least
	 the high-order bits are sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
	return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
		    num_sign_bit_copies (SUBREG_REG (x), mode));

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
	{
	  num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
	  return MAX (1, (num0
			  - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
			     - bitwidth)));
	}

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
	 affect the entire register, just look inside.  Note that we are
	 passing MODE to the recursive call, so the number of sign bit copies
	 will remain relative to that mode, not the inner mode.  */

      /* This works only if loads sign extend.  Otherwise, if we get a
	 reload for the inner part, it may be loaded from the stack, and
	 then we lose all sign bit copies that existed before the store
	 to the stack.  */

      if ((GET_MODE_SIZE (GET_MODE (x))
	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
	return num_sign_bit_copies (SUBREG_REG (x), mode);
#endif
#endif
      break;

    case SIGN_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	      + num_sign_bit_copies (XEXP (x, 0), VOIDmode));

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
      return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
			      - bitwidth)));

    case NOT:
      return num_sign_bit_copies (XEXP (x, 0), mode);

    case ROTATE:  case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
	 of sign bit copies, we can just subtract that amount from the
	 number.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
	{
	  num0 = num_sign_bit_copies (XEXP (x, 0), mode);
	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
				 : bitwidth - INTVAL (XEXP (x, 1))));
	}
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
	 is known to be positive, the number of sign bit copies is the
	 same as that of the input.  Finally, if the input has just one bit
	 that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
	return bitwidth;

      if (num0 > 1
	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
	num0--;

      return num0;

    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
	 MIN and MAX operations always return one of the operands.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
      return MIN (num0, num1);

    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
	 if we are subtracting 1 from a positive number, there will not
	 be such a carry.  Furthermore, if the positive number is known to
	 be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
	{
	  nonzero = nonzero_bits (XEXP (x, 0), mode);
	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
	    return (nonzero == 1 || nonzero == 0 ? bitwidth
		    : bitwidth - floor_log2 (nonzero) - 1);
	}

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
      return MAX (1, MIN (num0, num1) - 1);

    case MULT:
      /* The number of bits of the product is the sum of the number of
	 bits of both terms.  However, unless one of the terms is known
	 to be positive, we must allow for an additional bit since negating
	 a negative number can remove one sign bit copy.  */

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (((nonzero_bits (XEXP (x, 0), mode)
		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
		  && ((nonzero_bits (XEXP (x, 1), mode)
		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
	result--;

      return MAX (1, result);

    case UDIV:
      /* The result must be <= the first operand.  If the first operand
	 has the high bit set, we know nothing about the number of sign
	 bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	return 1;
      else
	return num_sign_bit_copies (XEXP (x, 0), mode);

    case UMOD:
      /* The result must be <= the second operand.  */
      return num_sign_bit_copies (XEXP (x, 1), mode);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
	 the case where the divisor is negative, in which case we have
	 to add 1.  */
      result = num_sign_bit_copies (XEXP (x, 0), mode);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
	result--;

      return result;

    case MOD:
      result = num_sign_bit_copies (XEXP (x, 1), mode);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
	result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
	 sign bit.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) > 0)
	num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || INTVAL (XEXP (x, 1)) < 0
	  || INTVAL (XEXP (x, 1)) >= bitwidth)
	return 1;

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      num0 = num_sign_bit_copies (XEXP (x, 1), mode);
      num1 = num_sign_bit_copies (XEXP (x, 2), mode);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case GEU: case GTU: case LEU: case LTU:
      if (STORE_FLAG_VALUE == -1)
	return bitwidth;
      break;

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */

  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
}
\f
/* Return the number of "extended" bits there are in X, when interpreted
   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
   unsigned quantities, this is the number of high-order zero bits.
   For signed quantities, this is the number of copies of the sign bit
   minus 1.  In both cases, this function returns the number of "spare"
   bits.  For example, if two quantities for which this function returns
   at least 1 are added, the addition is known not to overflow.

   This function will always return 0 unless called during combine, which
   implies that it must be called from a define_split.  */
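
/* Example (illustrative): for an unsigned SImode value whose nonzero_bits
   are 0xff, this returns 31 - 7 = 24 spare bits; for a signed value with
   nine sign bit copies it returns 8.  A define_split can test this to
   know that, say, an add of two such values cannot overflow.  */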

int
extended_count (x, mode, unsignedp)
     rtx x;
     enum machine_mode mode;
     int unsignedp;
{
  if (nonzero_sign_valid == 0)
    return 0;

  return (unsignedp
	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	     && (GET_MODE_BITSIZE (mode) - 1
		 - floor_log2 (nonzero_bits (x, mode))))
	  : num_sign_bit_copies (x, mode) - 1);
}
\f
/* This function is called from `simplify_shift_const' to merge two
   outer operations.  Specifically, we have already found that we need
   to perform operation *POP0 with constant *PCONST0 at the outermost
   position.  We would now like to also perform OP1 with constant CONST1
   (with *POP0 being done last).

   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
   the resulting operation.  *PCOMP_P is set to 1 if we would need to
   complement the innermost operand, otherwise it is unchanged.

   MODE is the mode in which the operation will be done.  No bits outside
   the width of this mode matter.  It is assumed that the width of this mode
   is smaller than or equal to HOST_BITS_PER_WIDE_INT.

   If *POP0 or OP1 are NIL, it means no operation is required.  Only NEG, PLUS,
   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
   result is simply *PCONST0.

   If the resulting operation cannot be expressed as one operation, we
   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
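
/* Example (illustrative constants): if the outer operation so far is
   *POP0 == XOR with *PCONST0 == 0x0f and we now need OP1 == AND with
   CONST1 == 0x0f, the identity (a & b) ^ b == (~a) & b applies, so we
   return 1 with *POP0 == AND, *PCONST0 == 0x0f and *PCOMP_P set to
   complement the innermost operand.  */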

static int
merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
     enum rtx_code *pop0;
     HOST_WIDE_INT *pconst0;
     enum rtx_code op1;
     HOST_WIDE_INT const1;
     enum machine_mode mode;
     int *pcomp_p;
{
  enum rtx_code op0 = *pop0;
  HOST_WIDE_INT const0 = *pconst0;

  const0 &= GET_MODE_MASK (mode);
  const1 &= GET_MODE_MASK (mode);

  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
  if (op0 == AND)
    const1 &= const0;

  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
     if OP0 is SET.  */

  if (op1 == NIL || op0 == SET)
    return 1;

  else if (op0 == NIL)
    op0 = op1, const0 = const1;

  else if (op0 == op1)
    {
      switch (op0)
	{
	case AND:
	  const0 &= const1;
	  break;
	case IOR:
	  const0 |= const1;
	  break;
	case XOR:
	  const0 ^= const1;
	  break;
	case PLUS:
	  const0 += const1;
	  break;
	case NEG:
	  op0 = NIL;
	  break;
	default:
	  break;
	}
    }

  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
    return 0;

  /* If the two constants aren't the same, we can't do anything.  The
     remaining six cases can all be done.  */
  else if (const0 != const1)
    return 0;

  else
    switch (op0)
      {
      case IOR:
	if (op1 == AND)
	  /* (a & b) | b == b  */
	  op0 = SET;
	else /* op1 == XOR */
	  /* (a ^ b) | b == a | b  */
	  {;}
	break;

      case XOR:
	if (op1 == AND)
	  /* (a & b) ^ b == (~a) & b  */
	  op0 = AND, *pcomp_p = 1;
	else /* op1 == IOR */
	  /* (a | b) ^ b == a & ~b  */
	  op0 = AND, *pconst0 = ~ const0;
	break;

      case AND:
	if (op1 == IOR)
	  /* (a | b) & b == b  */
	  op0 = SET;
	else /* op1 == XOR */
	  /* (a ^ b) & b == (~a) & b  */
	  *pcomp_p = 1;
	break;
      default:
	break;
      }

  /* Check for NO-OP cases.  */
  const0 &= GET_MODE_MASK (mode);
  if (const0 == 0
      && (op0 == IOR || op0 == XOR || op0 == PLUS))
    op0 = NIL;
  else if (const0 == 0 && op0 == AND)
    op0 = SET;
  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
	   && op0 == AND)
    op0 = NIL;

  /* ??? Slightly redundant with the above mask, but not entirely.
     Moving this above means we'd have to sign-extend the mode mask
     for the final test.  */
  const0 = trunc_int_for_mode (const0, mode);

  *pop0 = op0;
  *pconst0 = const0;

  return 1;
}
\f
/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
   The result of the shift is RESULT_MODE.  X, if non-zero, is an expression
   that we started with.

   The shift is normally computed in the widest mode we find in VAROP, as
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */
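
/* A classic example (illustrative): in SImode,
   (lshiftrt (ashift X (const_int 24)) (const_int 24)) reaches the
   nested-shift code below, which merges the opposite shifts into a count
   of zero and an outer AND, producing (and X (const_int 255)), i.e. a
   zero-extension of the low byte.  */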

static rtx
simplify_shift_const (x, code, result_mode, varop, count)
     rtx x;
     enum rtx_code code;
     enum machine_mode result_mode;
     rtx varop;
     int count;
{
  enum rtx_code orig_code = code;
  int orig_count = count;
  enum machine_mode mode = result_mode;
  enum machine_mode shift_mode, tmode;
  int mode_words
    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
  /* We form (outer_op (code varop count) (outer_const)).  */
  enum rtx_code outer_op = NIL;
  HOST_WIDE_INT outer_const = 0;
  rtx const_rtx;
  int complement_p = 0;
  rtx new;

  /* If we were given an invalid count, don't do anything except exactly
     what was requested.  */

  if (count < 0 || count > GET_MODE_BITSIZE (mode))
    {
      if (x)
	return x;

      return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (count));
    }

  /* Unless one of the branches of the `if' in this loop does a `continue',
     we will `break' the loop after the `if'.  */

  while (count != 0)
    {
      /* If we have an operand of (clobber (const_int 0)), just return that
	 value.  */
      if (GET_CODE (varop) == CLOBBER)
	return varop;

      /* If we discovered we had to complement VAROP, leave.  Making a NOT
	 here would cause an infinite loop.  */
      if (complement_p)
	break;

      /* Convert ROTATERT to ROTATE.  */
      if (code == ROTATERT)
	code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;

      /* We need to determine what mode we will do the shift in.  If the
	 shift is a right shift or a ROTATE, we must always do it in the mode
	 it was originally done in.  Otherwise, we can do it in MODE, the
	 widest mode encountered.  */
      shift_mode
	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
	   ? result_mode : mode);

      /* Handle cases where the count is greater than the size of the mode
	 minus 1.  For ASHIFT, use the size minus one as the count (this can
	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
	 take the count modulo the size.  For other shifts, the result is
	 zero.

	 Since these shifts are being produced by the compiler by combining
	 multiple operations, each of which are defined, we know what the
	 result is supposed to be.  */

      if (count > GET_MODE_BITSIZE (shift_mode) - 1)
	{
	  if (code == ASHIFTRT)
	    count = GET_MODE_BITSIZE (shift_mode) - 1;
	  else if (code == ROTATE || code == ROTATERT)
	    count %= GET_MODE_BITSIZE (shift_mode);
	  else
	    {
	      /* We can't simply return zero because there may be an
		 outer op.  */
	      varop = const0_rtx;
	      count = 0;
	      break;
	    }
	}

      /* Negative counts are invalid and should not have been made (a
	 programmer-specified negative count should have been handled
	 above).  */
      else if (count < 0)
	abort ();

      /* An arithmetic right shift of a quantity known to be -1 or 0
	 is a no-op.  */
      if (code == ASHIFTRT
	  && (num_sign_bit_copies (varop, shift_mode)
	      == GET_MODE_BITSIZE (shift_mode)))
	{
	  count = 0;
	  break;
	}

      /* If we are doing an arithmetic right shift and discarding all but
	 the sign bit copies, this is equivalent to doing a shift by the
	 bitsize minus one.  Convert it into that shift because it will often
	 allow other simplifications.  */

      if (code == ASHIFTRT
	  && (count + num_sign_bit_copies (varop, shift_mode)
	      >= GET_MODE_BITSIZE (shift_mode)))
	count = GET_MODE_BITSIZE (shift_mode) - 1;

      /* We simplify the tests below and elsewhere by converting
	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
	 `make_compound_operation' will convert it to an ASHIFTRT for
	 those machines (such as VAX) that don't have a LSHIFTRT.  */
      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
	  && code == ASHIFTRT
	  && ((nonzero_bits (varop, shift_mode)
	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
	      == 0))
	code = LSHIFTRT;

      switch (GET_CODE (varop))
	{
	case SIGN_EXTEND:
	case ZERO_EXTEND:
	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  new = expand_compound_operation (varop);
	  if (new != varop)
	    {
	      varop = new;
	      continue;
	    }
	  break;

	case MEM:
	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
	     minus the width of a smaller mode, we can do this with a
	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
	  if ((code == ASHIFTRT || code == LSHIFTRT)
	      && ! mode_dependent_address_p (XEXP (varop, 0))
	      && ! MEM_VOLATILE_P (varop)
	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
					 MODE_INT, 1)) != BLKmode)
	    {
	      if (BYTES_BIG_ENDIAN)
		new = gen_rtx_MEM (tmode, XEXP (varop, 0));
	      else
		new = gen_rtx_MEM (tmode,
				   plus_constant (XEXP (varop, 0),
						  count / BITS_PER_UNIT));
	      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
	      MEM_COPY_ATTRIBUTES (new, varop);
	      varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
				       : ZERO_EXTEND, mode, new);
	      count = 0;
	      continue;
	    }
	  break;

	case USE:
	  /* Similar to the case above, except that we can only do this if
	     the resulting mode is the same as that of the underlying
	     MEM and adjust the address depending on the *bits* endianness
	     because of the way that bit-field extract insns are defined.  */
	  if ((code == ASHIFTRT || code == LSHIFTRT)
	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
					 MODE_INT, 1)) != BLKmode
	      && tmode == GET_MODE (XEXP (varop, 0)))
	    {
	      if (BITS_BIG_ENDIAN)
		new = XEXP (varop, 0);
	      else
		{
		  new = copy_rtx (XEXP (varop, 0));
		  SUBST (XEXP (new, 0),
			 plus_constant (XEXP (new, 0),
					count / BITS_PER_UNIT));
		}

	      varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
				       : ZERO_EXTEND, mode, new);
	      count = 0;
	      continue;
	    }
	  break;

	case SUBREG:
	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
	     the same number of words as what we've seen so far.  Then store
	     the widest mode in MODE.  */
	  if (subreg_lowpart_p (varop)
	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
		  > GET_MODE_SIZE (GET_MODE (varop)))
	      && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
		    + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		  == mode_words))
	    {
	      varop = SUBREG_REG (varop);
	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
		mode = GET_MODE (varop);
	      continue;
	    }
	  break;

	case MULT:
	  /* Some machines use MULT instead of ASHIFT because MULT
	     is cheaper.  But it is still better on those machines to
	     merge two shifts into one.  */
	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
	    {
	      varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
				  GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
	      continue;
	    }
	  break;
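
	  /* E.g. (illustrative): (lshiftrt (mult X (const_int 8)) (const_int 3))
	     becomes (lshiftrt (ashift X (const_int 3)) (const_int 3)) here,
	     and the nested-shift logic above then reduces it to an AND of X
	     with a mask whose top three bits are clear.  The UDIV case below
	     plays the same trick for (udiv X (const_int 8)).  */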
8681
8682 case UDIV:
8683 /* Similar, for when divides are cheaper. */
8684 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8685 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8686 {
8687 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 8688 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
8689 continue;
8690 }
8691 break;
8692
8693 case ASHIFTRT:
8694 /* If we are extracting just the sign bit of an arithmetic right
8695 shift, that shift is not needed. */
8696 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
8697 {
8698 varop = XEXP (varop, 0);
8699 continue;
8700 }
8701
0f41302f 8702 /* ... fall through ... */
230d793d
RS
8703
8704 case LSHIFTRT:
8705 case ASHIFT:
230d793d
RS
8706 case ROTATE:
8707 /* Here we have two nested shifts. The result is usually the
8708 AND of a new shift with a mask. We compute the result below. */
8709 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8710 && INTVAL (XEXP (varop, 1)) >= 0
8711 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
8712 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8713 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
8714 {
8715 enum rtx_code first_code = GET_CODE (varop);
8716 int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 8717 unsigned HOST_WIDE_INT mask;
230d793d 8718 rtx mask_rtx;
230d793d 8719
230d793d
RS
8720 /* We have one common special case. We can't do any merging if
8721 the inner code is an ASHIFTRT of a smaller mode. However, if
8722 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8723 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8724 we can convert it to
8725 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
8726 This simplifies certain SIGN_EXTEND operations. */
8727 if (code == ASHIFT && first_code == ASHIFTRT
8728 && (GET_MODE_BITSIZE (result_mode)
8729 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8730 {
8731 /* C3 has the low-order C1 bits zero. */
8732
5f4f0e22
CH
8733 mask = (GET_MODE_MASK (mode)
8734 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 8735
5f4f0e22 8736 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 8737 XEXP (varop, 0), mask);
5f4f0e22 8738 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
8739 varop, count);
8740 count = first_count;
8741 code = ASHIFTRT;
8742 continue;
8743 }
8744
d0ab8cd3
RK
8745 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8746 than C1 high-order bits equal to the sign bit, we can convert
8747 this to either an ASHIFT or a ASHIFTRT depending on the
8748 two counts.
230d793d
RS
8749
8750 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8751
8752 if (code == ASHIFTRT && first_code == ASHIFT
8753 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
8754 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8755 > first_count))
230d793d 8756 {
d0ab8cd3
RK
8757 count -= first_count;
8758 if (count < 0)
8759 count = - count, code = ASHIFT;
8760 varop = XEXP (varop, 0);
8761 continue;
230d793d
RS
8762 }
8763
8764 /* There are some cases we can't do. If CODE is ASHIFTRT,
8765 we can only do this if FIRST_CODE is also ASHIFTRT.
8766
8767 We can't do the case when CODE is ROTATE and FIRST_CODE is
8768 ASHIFTRT.
8769
8770 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 8771 we can't do this if either shift is a right shift or ROTATE.
230d793d
RS
8772
8773 Finally, we can't do any of these if the mode is too wide
8774 unless the codes are the same.
8775
8776 Handle the case where the shift codes are the same
8777 first. */
8778
8779 if (code == first_code)
8780 {
8781 if (GET_MODE (varop) != result_mode
bdaae9a0
RK
8782 && (code == ASHIFTRT || code == LSHIFTRT
8783 || code == ROTATE))
230d793d
RS
8784 break;
8785
8786 count += first_count;
8787 varop = XEXP (varop, 0);
8788 continue;
8789 }
8790
8791 if (code == ASHIFTRT
8792 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 8793 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 8794 || (GET_MODE (varop) != result_mode
bdaae9a0
RK
8795 && (first_code == ASHIFTRT || first_code == LSHIFTRT
8796 || first_code == ROTATE
230d793d
RS
8797 || code == ROTATE)))
8798 break;
8799
8800 /* To compute the mask to apply after the shift, shift the
951553af 8801 nonzero bits of the inner shift the same way the
230d793d
RS
8802 outer shift will. */
8803
951553af 8804 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
8805
8806 mask_rtx
8807 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 8808 GEN_INT (count));
230d793d
RS
8809
8810 /* Give up if we can't compute an outer operation to use. */
8811 if (mask_rtx == 0
8812 || GET_CODE (mask_rtx) != CONST_INT
8813 || ! merge_outer_ops (&outer_op, &outer_const, AND,
8814 INTVAL (mask_rtx),
8815 result_mode, &complement_p))
8816 break;
8817
8818 /* If the shifts are in the same direction, we add the
8819 counts. Otherwise, we subtract them. */
8820 if ((code == ASHIFTRT || code == LSHIFTRT)
8821 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8822 count += first_count;
8823 else
8824 count -= first_count;
8825
8826 /* If COUNT is positive, the new shift is usually CODE,
8827 except for the two exceptions below, in which case it is
8828 FIRST_CODE. If the count is negative, FIRST_CODE should
8829 always be used.  */
8830 if (count > 0
8831 && ((first_code == ROTATE && code == ASHIFT)
8832 || (first_code == ASHIFTRT && code == LSHIFTRT)))
8833 code = first_code;
8834 else if (count < 0)
8835 code = first_code, count = - count;
8836
8837 varop = XEXP (varop, 0);
8838 continue;
8839 }
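	  /* Example in QImode: for (lshiftrt (ashift X 2) 3) the two
	     shifts run in opposite directions, so the net count is 1;
	     the nonzero bits of (ashift X 2) are 0xfc, and shifting
	     that mask right by 3 gives 0x1f, so the result is
	     (and (lshiftrt X 1) 0x1f).  */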
8840
8841 /* If we have (A << B << C) for any shift, we can convert this to
8842 (A << C << B). This wins if A is a constant. Only try this if
8843 B is not a constant. */
8844
8845 else if (GET_CODE (varop) == code
8846 && GET_CODE (XEXP (varop, 1)) != CONST_INT
8847 && 0 != (new
8848 = simplify_binary_operation (code, mode,
8849 XEXP (varop, 0),
5f4f0e22 8850 GEN_INT (count))))
230d793d
RS
8851 {
8852 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8853 count = 0;
8854 continue;
8855 }
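	  /* Example: (ashift (ashift (const_int 2) B) 3) becomes
	     (ashift (const_int 16) B), since 2 << 3 folds to the
	     constant 16.  */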
8856 break;
8857
8858 case NOT:
8859 /* Make this fit the case below. */
8860 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 8861 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
8862 continue;
8863
8864 case IOR:
8865 case AND:
8866 case XOR:
8867 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8868 with C the size of VAROP - 1 and the shift is logical if
8869 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8870 we have an (le X 0) operation. If we have an arithmetic shift
8871 and STORE_FLAG_VALUE is 1 or we have a logical shift with
8872 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
8873
8874 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8875 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8876 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8877 && (code == LSHIFTRT || code == ASHIFTRT)
8878 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8879 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8880 {
8881 count = 0;
8882 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8883 const0_rtx);
8884
8885 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8886 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8887
8888 continue;
8889 }
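	  /* Why this works: when X > 0, both X and X - 1 have a clear
	     sign bit, so the IOR's sign bit is clear; when X <= 0,
	     either X itself (X < 0) or X - 1 (X == 0 gives -1) has the
	     sign bit set.  Shifting that sign bit down thus computes
	     (le X 0) in STORE_FLAG_VALUE form.  */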
8890
8891 /* If we have (shift (logical)), move the logical to the outside
8892 to allow it to possibly combine with another logical and the
8893 shift to combine with another shift. This also canonicalizes to
8894 what a ZERO_EXTRACT looks like. Also, some machines have
8895 (and (shift)) insns. */
8896
8897 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8898 && (new = simplify_binary_operation (code, result_mode,
8899 XEXP (varop, 1),
5f4f0e22 8900 GEN_INT (count))) != 0
7d171a1e 8901 && GET_CODE(new) == CONST_INT
230d793d
RS
8902 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8903 INTVAL (new), result_mode, &complement_p))
8904 {
8905 varop = XEXP (varop, 0);
8906 continue;
8907 }
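	  /* Example: (lshiftrt (and X 0xf0) 4) becomes
	     (and (lshiftrt X 4) 0x0f); the AND mask is shifted the
	     same way as the value.  */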
8908
8909 /* If we can't do that, try to simplify the shift in each arm of the
8910 logical expression, make a new logical expression, and apply
8911 the inverse distributive law. */
8912 {
00d4ca1c 8913 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 8914 XEXP (varop, 0), count);
00d4ca1c 8915 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
8916 XEXP (varop, 1), count);
8917
21a64bf1 8918 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
230d793d
RS
8919 varop = apply_distributive_law (varop);
8920
8921 count = 0;
8922 }
8923 break;
8924
8925 case EQ:
45620ed4 8926 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 8927 says that the sign bit can be tested, FOO has mode MODE, C is
45620ed4
RK
9928 GET_MODE_BITSIZE (MODE) - 1, and only the low-order bit of FOO
9929 may be nonzero.  */
8930 if (code == LSHIFTRT
230d793d
RS
8931 && XEXP (varop, 1) == const0_rtx
8932 && GET_MODE (XEXP (varop, 0)) == result_mode
8933 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 8934 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8935 && ((STORE_FLAG_VALUE
5f4f0e22 8936 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 8937 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8938 && merge_outer_ops (&outer_op, &outer_const, XOR,
8939 (HOST_WIDE_INT) 1, result_mode,
8940 &complement_p))
230d793d
RS
8941 {
8942 varop = XEXP (varop, 0);
8943 count = 0;
8944 continue;
8945 }
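	  /* Example in SImode with STORE_FLAG_VALUE == -1: if FOO is
	     known to be 0 or 1, (lshiftrt (eq FOO 0) 31) is 1 exactly
	     when FOO == 0, i.e. (xor FOO 1).  */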
8946 break;
8947
8948 case NEG:
d0ab8cd3
RK
8949 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8950 than the number of bits in the mode is equivalent to A. */
8951 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 8952 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 8953 {
d0ab8cd3 8954 varop = XEXP (varop, 0);
230d793d
RS
8955 count = 0;
8956 continue;
8957 }
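	  /* Example in SImode: for A known to be 0 or 1,
	     (lshiftrt (neg A) 31) is A itself: (neg 1) is -1, whose
	     sign bit shifts down to 1, and (neg 0) remains 0.  */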
8958
8959 /* NEG commutes with ASHIFT since it is multiplication. Move the
8960 NEG outside to allow shifts to combine. */
8961 if (code == ASHIFT
5f4f0e22
CH
8962 && merge_outer_ops (&outer_op, &outer_const, NEG,
8963 (HOST_WIDE_INT) 0, result_mode,
8964 &complement_p))
230d793d
RS
8965 {
8966 varop = XEXP (varop, 0);
8967 continue;
8968 }
8969 break;
8970
8971 case PLUS:
d0ab8cd3
RK
8972 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8973 is one less than the number of bits in the mode is
8974 equivalent to (xor A 1). */
230d793d
RS
8975 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8976 && XEXP (varop, 1) == constm1_rtx
951553af 8977 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8978 && merge_outer_ops (&outer_op, &outer_const, XOR,
8979 (HOST_WIDE_INT) 1, result_mode,
8980 &complement_p))
230d793d
RS
8981 {
8982 count = 0;
8983 varop = XEXP (varop, 0);
8984 continue;
8985 }
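	  /* Example in SImode: for A known to be 0 or 1,
	     (lshiftrt (plus A -1) 31) is 1 when A == 0 (since A - 1 is
	     then -1) and 0 when A == 1, i.e. (xor A 1).  */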
8986
3f508eca 8987 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 8988 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
8989 bits are known zero in FOO, we can replace the PLUS with FOO.
8990 Similarly in the other operand order. This code occurs when
8991 we are computing the size of a variable-size array. */
8992
8993 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8994 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
8995 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8996 && (nonzero_bits (XEXP (varop, 1), result_mode)
8997 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
8998 {
8999 varop = XEXP (varop, 0);
9000 continue;
9001 }
9002 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 9003 && count < HOST_BITS_PER_WIDE_INT
ac49a949 9004 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 9005 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 9006 >> count)
951553af
RK
9007 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9008 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
9009 result_mode)))
9010 {
9011 varop = XEXP (varop, 1);
9012 continue;
9013 }
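	  /* Example: in (lshiftrt (plus FOO BAR) 3), if BAR's nonzero
	     bits all lie in the low three bits and those bits are known
	     zero in FOO, the addition cannot carry into the bits that
	     survive the shift, so the result is (lshiftrt FOO 3).  */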
9014
230d793d
RS
9015 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9016 if (code == ASHIFT
9017 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9018 && (new = simplify_binary_operation (ASHIFT, result_mode,
9019 XEXP (varop, 1),
5f4f0e22 9020 GEN_INT (count))) != 0
7d171a1e 9021 && GET_CODE(new) == CONST_INT
230d793d
RS
9022 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9023 INTVAL (new), result_mode, &complement_p))
9024 {
9025 varop = XEXP (varop, 0);
9026 continue;
9027 }
9028 break;
9029
9030 case MINUS:
9031 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9032 with C the size of VAROP - 1 and the shift is logical if
9033 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9034 we have a (gt X 0) operation. If the shift is arithmetic with
9035 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9036 we have a (neg (gt X 0)) operation. */
9037
0802d516
RK
9038 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9039 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
230d793d 9040 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
230d793d
RS
9041 && (code == LSHIFTRT || code == ASHIFTRT)
9042 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9043 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9044 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9045 {
9046 count = 0;
9047 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
9048 const0_rtx);
9049
9050 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9051 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
9052
9053 continue;
9054 }
9055 break;
6e0ef100
JC
9056
9057 case TRUNCATE:
9058 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9059 if the truncate does not affect the value. */
9060 if (code == LSHIFTRT
9061 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9062 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9063 && (INTVAL (XEXP (XEXP (varop, 0), 1))
b577a8ff
JL
9064 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9065 - GET_MODE_BITSIZE (GET_MODE (varop)))))
6e0ef100
JC
9066 {
9067 rtx varop_inner = XEXP (varop, 0);
9068
9069 varop_inner = gen_rtx_combine (LSHIFTRT,
9070 GET_MODE (varop_inner),
9071 XEXP (varop_inner, 0),
9072 GEN_INT (count + INTVAL (XEXP (varop_inner, 1))));
9073 varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
9074 varop_inner);
9075 count = 0;
9076 continue;
9077 }
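	  /* Example: (lshiftrt:SI (truncate:SI (lshiftrt:DI X 32)) 5)
	     becomes (truncate:SI (lshiftrt:DI X 37)); after the inner
	     shift by 32 the truncation drops only zero bits, so the
	     two right shifts can be combined.  */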
9078 break;
e9a25f70
JL
9079
9080 default:
9081 break;
230d793d
RS
9082 }
9083
9084 break;
9085 }
9086
9087 /* We need to determine what mode to do the shift in. If the shift is
f6789c77
RK
9088 a right shift or ROTATE, we must always do it in the mode it was
9089 originally done in. Otherwise, we can do it in MODE, the widest mode
9090 encountered. The code we care about is that of the shift that will
9091 actually be done, not the shift that was originally requested. */
9092 shift_mode
9093 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9094 ? result_mode : mode);
230d793d
RS
9095
9096 /* We have now finished analyzing the shift. The result should be
9097 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9098 OUTER_OP is non-NIL, it is an operation that needs to be applied
9099 to the result of the shift. OUTER_CONST is the relevant constant,
9100 but we must turn off all bits turned off in the shift.
9101
9102 If we were passed a value for X, see if we can use any pieces of
9103 it. If not, make new rtx. */
9104
9105 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9106 && GET_CODE (XEXP (x, 1)) == CONST_INT
9107 && INTVAL (XEXP (x, 1)) == count)
9108 const_rtx = XEXP (x, 1);
9109 else
5f4f0e22 9110 const_rtx = GEN_INT (count);
230d793d
RS
9111
9112 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9113 && GET_MODE (XEXP (x, 0)) == shift_mode
9114 && SUBREG_REG (XEXP (x, 0)) == varop)
9115 varop = XEXP (x, 0);
9116 else if (GET_MODE (varop) != shift_mode)
9117 varop = gen_lowpart_for_combine (shift_mode, varop);
9118
0f41302f 9119 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
9120 if (GET_CODE (varop) == CLOBBER)
9121 return x ? x : varop;
9122
9123 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9124 if (new != 0)
9125 x = new;
9126 else
9127 {
9128 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
9129 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
9130
9131 SUBST (XEXP (x, 0), varop);
9132 SUBST (XEXP (x, 1), const_rtx);
9133 }
9134
224eeff2
RK
9135 /* If we have an outer operation and we just made a shift, it is
9136 possible that we could have simplified the shift were it not
9137 for the outer operation. So try to do the simplification
9138 recursively. */
9139
9140 if (outer_op != NIL && GET_CODE (x) == code
9141 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9142 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9143 INTVAL (XEXP (x, 1)));
9144
230d793d
RS
9145 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9146 turn off all the bits that the shift would have turned off. */
9147 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 9148 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d
RS
9149 GET_MODE_MASK (result_mode) >> orig_count);
9150
9151 /* Do the remainder of the processing in RESULT_MODE. */
9152 x = gen_lowpart_for_combine (result_mode, x);
9153
9154 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9155 operation. */
9156 if (complement_p)
0c1c8ea6 9157 x = gen_unary (NOT, result_mode, result_mode, x);
230d793d
RS
9158
9159 if (outer_op != NIL)
9160 {
5f4f0e22 9161 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7e4ce834 9162 outer_const = trunc_int_for_mode (outer_const, result_mode);
230d793d
RS
9163
9164 if (outer_op == AND)
5f4f0e22 9165 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
9166 else if (outer_op == SET)
9167 /* This means that we have determined that the result is
9168 equivalent to a constant. This should be rare. */
5f4f0e22 9169 x = GEN_INT (outer_const);
230d793d 9170 else if (GET_RTX_CLASS (outer_op) == '1')
0c1c8ea6 9171 x = gen_unary (outer_op, result_mode, result_mode, x);
230d793d 9172 else
5f4f0e22 9173 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
9174 }
9175
9176 return x;
9177}
9178\f
9179/* Like recog, but we receive the address of a pointer to a new pattern.
9180 We try to match the rtx that the pointer points to.
9181 If that fails, we may try to modify or replace the pattern,
9182 storing the replacement into the same pointer object.
9183
9184 Modifications include deletion or addition of CLOBBERs.
9185
9186 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9187 the CLOBBERs are placed.
9188
9189 The value is the final insn code from the pattern ultimately matched,
9190 or -1. */
9191
9192static int
8e2f6e35 9193recog_for_combine (pnewpat, insn, pnotes)
230d793d
RS
9194 rtx *pnewpat;
9195 rtx insn;
9196 rtx *pnotes;
9197{
9198 register rtx pat = *pnewpat;
9199 int insn_code_number;
9200 int num_clobbers_to_add = 0;
9201 int i;
9202 rtx notes = 0;
9203
974f4146
RK
9204 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9205 we use to indicate that something didn't match. If we find such a
9206 thing, force rejection. */
d96023cf 9207 if (GET_CODE (pat) == PARALLEL)
974f4146 9208 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
d96023cf
RK
9209 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9210 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
974f4146
RK
9211 return -1;
9212
230d793d
RS
9213 /* Is the result of combination a valid instruction? */
9214 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9215
9216 /* If it isn't, there is the possibility that we previously had an insn
9217 that clobbered some register as a side effect, but the combined
9218 insn doesn't need to do that. So try once more without the clobbers
9219 unless this represents an ASM insn. */
9220
9221 if (insn_code_number < 0 && ! check_asm_operands (pat)
9222 && GET_CODE (pat) == PARALLEL)
9223 {
9224 int pos;
9225
9226 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9227 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9228 {
9229 if (i != pos)
9230 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9231 pos++;
9232 }
9233
9234 SUBST_INT (XVECLEN (pat, 0), pos);
9235
9236 if (pos == 1)
9237 pat = XVECEXP (pat, 0, 0);
9238
9239 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9240 }
9241
9242 /* If we had any clobbers to add, make a new pattern that contains
9243 them. Then check to make sure that all of them are dead. */
9244 if (num_clobbers_to_add)
9245 {
38a448ca
RH
9246 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9247 gen_rtvec (GET_CODE (pat) == PARALLEL
c5c76735
JL
9248 ? (XVECLEN (pat, 0)
9249 + num_clobbers_to_add)
38a448ca 9250 : num_clobbers_to_add + 1));
230d793d
RS
9251
9252 if (GET_CODE (pat) == PARALLEL)
9253 for (i = 0; i < XVECLEN (pat, 0); i++)
9254 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9255 else
9256 XVECEXP (newpat, 0, 0) = pat;
9257
9258 add_clobbers (newpat, insn_code_number);
9259
9260 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9261 i < XVECLEN (newpat, 0); i++)
9262 {
9263 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9264 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9265 return -1;
38a448ca
RH
9266 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9267 XEXP (XVECEXP (newpat, 0, i), 0), notes);
230d793d
RS
9268 }
9269 pat = newpat;
9270 }
9271
9272 *pnewpat = pat;
9273 *pnotes = notes;
9274
9275 return insn_code_number;
9276}
9277\f
9278/* Like gen_lowpart but for use by combine. In combine it is not possible
9279 to create any new pseudoregs. However, it is safe to create
9280 invalid memory addresses, because combine will try to recognize
9281 them and all they will do is make the combine attempt fail.
9282
9283 If for some reason this cannot do its job, an rtx
9284 (clobber (const_int 0)) is returned.
9285 An insn containing that will not be recognized. */
9286
9287#undef gen_lowpart
9288
9289static rtx
9290gen_lowpart_for_combine (mode, x)
9291 enum machine_mode mode;
9292 register rtx x;
9293{
9294 rtx result;
9295
9296 if (GET_MODE (x) == mode)
9297 return x;
9298
eae957a8
RK
9299 /* We can only support MODE being wider than a word if X is a
9300 constant integer or has a mode the same size. */
9301
9302 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9303 && ! ((GET_MODE (x) == VOIDmode
9304 && (GET_CODE (x) == CONST_INT
9305 || GET_CODE (x) == CONST_DOUBLE))
9306 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
38a448ca 9307 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9308
9309 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9310 won't know what to do. So we will strip off the SUBREG here and
9311 process normally. */
9312 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9313 {
9314 x = SUBREG_REG (x);
9315 if (GET_MODE (x) == mode)
9316 return x;
9317 }
9318
9319 result = gen_lowpart_common (mode, x);
64bf47a2
RK
9320 if (result != 0
9321 && GET_CODE (result) == SUBREG
9322 && GET_CODE (SUBREG_REG (result)) == REG
9323 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9324 && (GET_MODE_SIZE (GET_MODE (result))
9325 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
b1f21e0a 9326 REG_CHANGES_SIZE (REGNO (SUBREG_REG (result))) = 1;
64bf47a2 9327
230d793d
RS
9328 if (result)
9329 return result;
9330
9331 if (GET_CODE (x) == MEM)
9332 {
9333 register int offset = 0;
9334 rtx new;
9335
9336 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9337 address. */
9338 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
38a448ca 9339 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9340
9341 /* If we want to refer to something bigger than the original memref,
9342 generate a perverse subreg instead. That will force a reload
9343 of the original memref X. */
9344 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
38a448ca 9345 return gen_rtx_SUBREG (mode, x, 0);
230d793d 9346
f76b9db2
ILT
9347 if (WORDS_BIG_ENDIAN)
9348 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9349 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
c5c76735 9350
f76b9db2
ILT
9351 if (BYTES_BIG_ENDIAN)
9352 {
9353 /* Adjust the address so that the address-after-the-data is
9354 unchanged. */
9355 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9356 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9357 }
38a448ca 9358 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
230d793d 9359 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
c6df88cb 9360 MEM_COPY_ATTRIBUTES (new, x);
230d793d
RS
9361 return new;
9362 }
9363
9364 /* If X is a comparison operator, rewrite it in a new mode. This
9365 probably won't match, but may allow further simplifications. */
9366 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9367 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9368
9369 /* If we couldn't simplify X any other way, just enclose it in a
9370 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 9371 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 9372 else
dfbe1b2f
RK
9373 {
9374 int word = 0;
9375
9376 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
9377 word = ((GET_MODE_SIZE (GET_MODE (x))
9378 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
9379 / UNITS_PER_WORD);
38a448ca 9380 return gen_rtx_SUBREG (mode, x, word);
dfbe1b2f 9381 }
230d793d
RS
9382}
9383\f
9384/* Make an rtx expression. This is a subset of gen_rtx and only supports
9385 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
9386
9387 If the identical expression was previously in the insn (in the undobuf),
9388 it will be returned. Only if it is not found will a new expression
9389 be made. */
9390
9391/*VARARGS2*/
9392static rtx
4f90e4a0 9393gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
230d793d 9394{
5148a72b 9395#ifndef ANSI_PROTOTYPES
230d793d
RS
9396 enum rtx_code code;
9397 enum machine_mode mode;
4f90e4a0
RK
9398#endif
9399 va_list p;
230d793d
RS
9400 int n_args;
9401 rtx args[3];
b729186a 9402 int j;
6f7d635c 9403 const char *fmt;
230d793d 9404 rtx rt;
241cea85 9405 struct undo *undo;
230d793d 9406
4f90e4a0
RK
9407 VA_START (p, mode);
9408
5148a72b 9409#ifndef ANSI_PROTOTYPES
230d793d
RS
9410 code = va_arg (p, enum rtx_code);
9411 mode = va_arg (p, enum machine_mode);
4f90e4a0
RK
9412#endif
9413
230d793d
RS
9414 n_args = GET_RTX_LENGTH (code);
9415 fmt = GET_RTX_FORMAT (code);
9416
9417 if (n_args == 0 || n_args > 3)
9418 abort ();
9419
9420 /* Get each arg and verify that it is supposed to be an expression. */
9421 for (j = 0; j < n_args; j++)
9422 {
9423 if (*fmt++ != 'e')
9424 abort ();
9425
9426 args[j] = va_arg (p, rtx);
9427 }
9428
f0305a2b
KG
9429 va_end (p);
9430
230d793d
RS
9431 /* See if this is in undobuf. Be sure we don't use objects that came
9432 from another insn; this could produce circular rtl structures. */
9433
241cea85
RK
9434 for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9435 if (!undo->is_int
9436 && GET_CODE (undo->old_contents.r) == code
9437 && GET_MODE (undo->old_contents.r) == mode)
230d793d
RS
9438 {
9439 for (j = 0; j < n_args; j++)
241cea85 9440 if (XEXP (undo->old_contents.r, j) != args[j])
230d793d
RS
9441 break;
9442
9443 if (j == n_args)
241cea85 9444 return undo->old_contents.r;
230d793d
RS
9445 }
9446
9447 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
9448 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
9449 rt = rtx_alloc (code);
9450 PUT_MODE (rt, mode);
9451 XEXP (rt, 0) = args[0];
9452 if (n_args > 1)
9453 {
9454 XEXP (rt, 1) = args[1];
9455 if (n_args > 2)
9456 XEXP (rt, 2) = args[2];
9457 }
9458 return rt;
9459}
9460
9461/* These routines make binary and unary operations by first seeing if they
9462 fold; if not, a new expression is allocated. */
9463
9464static rtx
9465gen_binary (code, mode, op0, op1)
9466 enum rtx_code code;
9467 enum machine_mode mode;
9468 rtx op0, op1;
9469{
9470 rtx result;
1a26b032
RK
9471 rtx tem;
9472
9473 if (GET_RTX_CLASS (code) == 'c'
9474 && (GET_CODE (op0) == CONST_INT
9475 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
9476 tem = op0, op0 = op1, op1 = tem;
230d793d
RS
9477
9478 if (GET_RTX_CLASS (code) == '<')
9479 {
9480 enum machine_mode op_mode = GET_MODE (op0);
9210df58
RK
9481
9482 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
0f41302f 9483 just (REL_OP X Y). */
9210df58
RK
9484 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9485 {
9486 op1 = XEXP (op0, 1);
9487 op0 = XEXP (op0, 0);
9488 op_mode = GET_MODE (op0);
9489 }
9490
230d793d
RS
9491 if (op_mode == VOIDmode)
9492 op_mode = GET_MODE (op1);
9493 result = simplify_relational_operation (code, op_mode, op0, op1);
9494 }
9495 else
9496 result = simplify_binary_operation (code, mode, op0, op1);
9497
9498 if (result)
9499 return result;
9500
9501 /* Put complex operands first and constants second. */
9502 if (GET_RTX_CLASS (code) == 'c'
9503 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9504 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9505 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9506 || (GET_CODE (op0) == SUBREG
9507 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9508 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9509 return gen_rtx_combine (code, mode, op1, op0);
9510
e5e809f4
JL
9511 /* If we are turning off bits already known off in OP0, we need not do
9512 an AND. */
9513 else if (code == AND && GET_CODE (op1) == CONST_INT
9514 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9515 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
9516 return op0;
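  /* Example: if the nonzero bits of OP0 all lie within 0x0f, then
     (and OP0 (const_int 0xff)) cannot change OP0, so OP0 itself is
     returned above.  */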
9517
230d793d
RS
9518 return gen_rtx_combine (code, mode, op0, op1);
9519}
9520
9521static rtx
0c1c8ea6 9522gen_unary (code, mode, op0_mode, op0)
230d793d 9523 enum rtx_code code;
0c1c8ea6 9524 enum machine_mode mode, op0_mode;
230d793d
RS
9525 rtx op0;
9526{
0c1c8ea6 9527 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
230d793d
RS
9528
9529 if (result)
9530 return result;
9531
9532 return gen_rtx_combine (code, mode, op0);
9533}
9534\f
9535/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9536 comparison code that will be tested.
9537
9538 The result is a possibly different comparison code to use. *POP0 and
9539 *POP1 may be updated.
9540
9541 It is possible that we might detect that a comparison is either always
9542 true or always false. However, we do not perform general constant
5089e22e 9543 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
9544 should have been detected earlier. Hence we ignore all such cases. */
9545
9546static enum rtx_code
9547simplify_comparison (code, pop0, pop1)
9548 enum rtx_code code;
9549 rtx *pop0;
9550 rtx *pop1;
9551{
9552 rtx op0 = *pop0;
9553 rtx op1 = *pop1;
9554 rtx tem, tem1;
9555 int i;
9556 enum machine_mode mode, tmode;
9557
9558 /* Try a few ways of applying the same transformation to both operands. */
9559 while (1)
9560 {
3a19aabc
RK
9561#ifndef WORD_REGISTER_OPERATIONS
9562 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9563 so check specially. */
9564 if (code != GTU && code != GEU && code != LTU && code != LEU
9565 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9566 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9567 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9568 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9569 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9570 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 9571 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
3a19aabc
RK
9572 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9573 && GET_CODE (XEXP (op1, 1)) == CONST_INT
9574 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9575 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9576 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9577 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9578 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9579 && (INTVAL (XEXP (op0, 1))
9580 == (GET_MODE_BITSIZE (GET_MODE (op0))
9581 - (GET_MODE_BITSIZE
9582 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9583 {
9584 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9585 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9586 }
9587#endif
9588
230d793d
RS
9589 /* If both operands are the same constant shift, see if we can ignore the
9590 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 9591 this shift are known to be zero for both inputs and if the type of
230d793d 9592 comparison is compatible with the shift. */
67232b23
RK
9593 if (GET_CODE (op0) == GET_CODE (op1)
9594 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9595 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 9596 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
67232b23
RK
9597 && (code != GT && code != LT && code != GE && code != LE))
9598 || (GET_CODE (op0) == ASHIFTRT
9599 && (code != GTU && code != LTU
9600 && code != GEU && code != LEU)))
9601 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9602 && INTVAL (XEXP (op0, 1)) >= 0
9603 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9604 && XEXP (op0, 1) == XEXP (op1, 1))
230d793d
RS
9605 {
9606 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 9607 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9608 int shift_count = INTVAL (XEXP (op0, 1));
9609
9610 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9611 mask &= (mask >> shift_count) << shift_count;
45620ed4 9612 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
9613 mask = (mask & (mask << shift_count)) >> shift_count;
9614
951553af
RK
9615 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
9616 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
230d793d
RS
9617 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9618 else
9619 break;
9620 }
9621
9622 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9623 SUBREGs are of the same mode, and, in both cases, the AND would
9624 be redundant if the comparison was done in the narrower mode,
9625 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
9626 and the operand's possibly nonzero bits are 0xffffff01; in that case
9627 if we only care about QImode, we don't need the AND). This case
9628 occurs if the output mode of an scc insn is not SImode and
7e4dc511
RK
9629 STORE_FLAG_VALUE == 1 (e.g., the 386).
9630
9631 Similarly, check for a case where the AND's are ZERO_EXTEND
9632 operations from some narrower mode even though a SUBREG is not
9633 present. */
230d793d
RS
9634
9635 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9636 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7e4dc511 9637 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
230d793d 9638 {
7e4dc511
RK
9639 rtx inner_op0 = XEXP (op0, 0);
9640 rtx inner_op1 = XEXP (op1, 0);
9641 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9642 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9643 int changed = 0;
9644
9645 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9646 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9647 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9648 && (GET_MODE (SUBREG_REG (inner_op0))
9649 == GET_MODE (SUBREG_REG (inner_op1)))
729a2bc6 9650 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
7e4dc511 9651 <= HOST_BITS_PER_WIDE_INT)
01c82bbb 9652 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
729a2bc6 9653 GET_MODE (SUBREG_REG (inner_op0)))))
01c82bbb
RK
9654 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9655 GET_MODE (SUBREG_REG (inner_op1))))))
7e4dc511
RK
9656 {
9657 op0 = SUBREG_REG (inner_op0);
9658 op1 = SUBREG_REG (inner_op1);
9659
9660 /* The resulting comparison is always unsigned since we masked
0f41302f 9661 off the original sign bit. */
7e4dc511
RK
9662 code = unsigned_condition (code);
9663
9664 changed = 1;
9665 }
230d793d 9666
7e4dc511
RK
9667 else if (c0 == c1)
9668 for (tmode = GET_CLASS_NARROWEST_MODE
9669 (GET_MODE_CLASS (GET_MODE (op0)));
9670 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
e51712db 9671 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
7e4dc511
RK
9672 {
9673 op0 = gen_lowpart_for_combine (tmode, inner_op0);
9674 op1 = gen_lowpart_for_combine (tmode, inner_op1);
66415c8b 9675 code = unsigned_condition (code);
7e4dc511
RK
9676 changed = 1;
9677 break;
9678 }
9679
9680 if (! changed)
9681 break;
230d793d 9682 }
3a19aabc 9683
ad25ba17
RK
9684 /* If both operands are NOT, we can strip off the outer operation
9685 and adjust the comparison code for swapped operands; similarly for
9686 NEG, except that this must be an equality comparison. */
9687 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9688 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9689 && (code == EQ || code == NE)))
9690 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 9691
230d793d
RS
9692 else
9693 break;
9694 }
9695
9696 /* If the first operand is a constant, swap the operands and adjust the
3aceff0d
RK
9697 comparison code appropriately, but don't do this if the second operand
9698 is already a constant integer. */
9699 if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
230d793d
RS
9700 {
9701 tem = op0, op0 = op1, op1 = tem;
9702 code = swap_condition (code);
9703 }
9704
9705 /* We now enter a loop during which we will try to simplify the comparison.
9706 For the most part, we only are concerned with comparisons with zero,
9707 but some things may really be comparisons with zero but not start
9708 out looking that way. */
9709
9710 while (GET_CODE (op1) == CONST_INT)
9711 {
9712 enum machine_mode mode = GET_MODE (op0);
9713 int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 9714 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9715 int equality_comparison_p;
9716 int sign_bit_comparison_p;
9717 int unsigned_comparison_p;
5f4f0e22 9718 HOST_WIDE_INT const_op;
230d793d
RS
9719
9720 /* We only want to handle integral modes. This catches VOIDmode,
9721 CCmode, and the floating-point modes. An exception is that we
9722 can handle VOIDmode if OP0 is a COMPARE or a comparison
9723 operation. */
9724
9725 if (GET_MODE_CLASS (mode) != MODE_INT
9726 && ! (mode == VOIDmode
9727 && (GET_CODE (op0) == COMPARE
9728 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
9729 break;
9730
9731 /* Get the constant we are comparing against and turn off all bits
9732 not on in our mode. */
9733 const_op = INTVAL (op1);
5f4f0e22 9734 if (mode_width <= HOST_BITS_PER_WIDE_INT)
4803a34a 9735 const_op &= mask;
230d793d
RS
9736
9737 /* If we are comparing against a constant power of two and the value
951553af 9738 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
9739 `and'ed with that bit), we can replace this with a comparison
9740 with zero. */
9741 if (const_op
9742 && (code == EQ || code == NE || code == GE || code == GEU
9743 || code == LT || code == LTU)
5f4f0e22 9744 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 9745 && exact_log2 (const_op) >= 0
e51712db 9746 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
230d793d
RS
9747 {
9748 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9749 op1 = const0_rtx, const_op = 0;
9750 }
9751
d0ab8cd3
RK
9752 /* Similarly, if we are comparing a value known to be either -1 or
9753 0 with -1, change it to the opposite comparison against zero. */
9754
9755 if (const_op == -1
9756 && (code == EQ || code == NE || code == GT || code == LE
9757 || code == GEU || code == LTU)
9758 && num_sign_bit_copies (op0, mode) == mode_width)
9759 {
9760 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9761 op1 = const0_rtx, const_op = 0;
9762 }
9763
230d793d 9764 /* Do some canonicalizations based on the comparison code. We prefer
4803a34a
RK
9765 comparisons against zero and then prefer equality comparisons.
9766 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
9767
9768 switch (code)
9769 {
9770 case LT:
4803a34a
RK
9771 /* < C is equivalent to <= (C - 1). */
9772 if (const_op > 0)
230d793d 9773 {
4803a34a 9774 const_op -= 1;
5f4f0e22 9775 op1 = GEN_INT (const_op);
230d793d
RS
9776 code = LE;
9777 /* ... fall through to LE case below. */
9778 }
9779 else
9780 break;
9781
9782 case LE:
4803a34a
RK
9783 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
9784 if (const_op < 0)
9785 {
9786 const_op += 1;
5f4f0e22 9787 op1 = GEN_INT (const_op);
4803a34a
RK
9788 code = LT;
9789 }
230d793d
RS
9790
9791 /* If we are doing a <= 0 comparison on a value known to have
9792 a zero sign bit, we can replace this with == 0. */
9793 else if (const_op == 0
5f4f0e22 9794 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9795 && (nonzero_bits (op0, mode)
5f4f0e22 9796 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
9797 code = EQ;
9798 break;
9799
9800 case GE:
0f41302f 9801 /* >= C is equivalent to > (C - 1). */
4803a34a 9802 if (const_op > 0)
230d793d 9803 {
4803a34a 9804 const_op -= 1;
5f4f0e22 9805 op1 = GEN_INT (const_op);
230d793d
RS
9806 code = GT;
9807 /* ... fall through to GT below. */
9808 }
9809 else
9810 break;
9811
9812 case GT:
4803a34a
RK
9813 /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
9814 if (const_op < 0)
9815 {
9816 const_op += 1;
5f4f0e22 9817 op1 = GEN_INT (const_op);
4803a34a
RK
9818 code = GE;
9819 }
230d793d
RS
9820
9821 /* If we are doing a > 0 comparison on a value known to have
9822 a zero sign bit, we can replace this with != 0. */
9823 else if (const_op == 0
5f4f0e22 9824 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9825 && (nonzero_bits (op0, mode)
5f4f0e22 9826 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
9827 code = NE;
9828 break;
9829
230d793d 9830 case LTU:
4803a34a
RK
9831 /* < C is equivalent to <= (C - 1). */
9832 if (const_op > 0)
9833 {
9834 const_op -= 1;
5f4f0e22 9835 op1 = GEN_INT (const_op);
4803a34a 9836 code = LEU;
0f41302f 9837 /* ... fall through ... */
4803a34a 9838 }
d0ab8cd3
RK
9839
9840 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
f77aada2
JW
9841 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9842 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
9843 {
9844 const_op = 0, op1 = const0_rtx;
9845 code = GE;
9846 break;
9847 }
4803a34a
RK
9848 else
9849 break;
230d793d
RS
9850
9851 case LEU:
9852 /* unsigned <= 0 is equivalent to == 0 */
9853 if (const_op == 0)
9854 code = EQ;
d0ab8cd3 9855
0f41302f 9856 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
f77aada2
JW
9857 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9858 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
9859 {
9860 const_op = 0, op1 = const0_rtx;
9861 code = GE;
9862 }
230d793d
RS
9863 break;
9864
4803a34a
RK
9865 case GEU:
9866 /* >= C is equivalent to > (C - 1). */
9867 if (const_op > 1)
9868 {
9869 const_op -= 1;
5f4f0e22 9870 op1 = GEN_INT (const_op);
4803a34a 9871 code = GTU;
0f41302f 9872 /* ... fall through ... */
4803a34a 9873 }
d0ab8cd3
RK
9874
9875 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
f77aada2
JW
9876 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9877 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
9878 {
9879 const_op = 0, op1 = const0_rtx;
9880 code = LT;
8b2e69e1 9881 break;
d0ab8cd3 9882 }
4803a34a
RK
9883 else
9884 break;
9885
230d793d
RS
9886 case GTU:
9887 /* unsigned > 0 is equivalent to != 0 */
9888 if (const_op == 0)
9889 code = NE;
d0ab8cd3
RK
9890
9891 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
f77aada2
JW
9892 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9893 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
9894 {
9895 const_op = 0, op1 = const0_rtx;
9896 code = LT;
9897 }
230d793d 9898 break;
e9a25f70
JL
9899
9900 default:
9901 break;
230d793d
RS
9902 }
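      /* Examples of the canonicalizations above: (LT x 5) becomes
	 (LE x 4); on a 32-bit machine (GEU x 0x80000000) becomes
	 (LT x 0), and (GTU x 0) becomes (NE x 0).  */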
9903
9904 /* Compute some predicates to simplify code below. */
9905
9906 equality_comparison_p = (code == EQ || code == NE);
9907 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9908 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9909 || code == LEU);
9910
6139ff20
RK
9911 /* If this is a sign bit comparison and we can do arithmetic in
9912 MODE, say that we will only be needing the sign bit of OP0. */
9913 if (sign_bit_comparison_p
9914 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9915 op0 = force_to_mode (op0, mode,
9916 ((HOST_WIDE_INT) 1
9917 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 9918 NULL_RTX, 0);
6139ff20 9919
230d793d
RS
9920 /* Now try cases based on the opcode of OP0. If none of the cases
9921 does a "continue", we exit this loop immediately after the
9922 switch. */
9923
9924 switch (GET_CODE (op0))
9925 {
9926 case ZERO_EXTRACT:
9927 /* If we are extracting a single bit from a variable position in
9928 a constant that has only a single bit set and are comparing it
9929 with zero, we can convert this into an equality comparison
d7cd794f 9930 between the position and the location of the single bit. */
230d793d 9931
230d793d
RS
9932 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
9933 && XEXP (op0, 1) == const1_rtx
9934 && equality_comparison_p && const_op == 0
d7cd794f 9935 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
230d793d 9936 {
f76b9db2 9937 if (BITS_BIG_ENDIAN)
0d8e55d8 9938 {
d7cd794f 9939#ifdef HAVE_extzv
a995e389 9940 mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
0d8e55d8
JL
9941 if (mode == VOIDmode)
9942 mode = word_mode;
9943 i = (GET_MODE_BITSIZE (mode) - 1 - i);
d7cd794f 9944#else
0d8e55d8 9945 i = BITS_PER_WORD - 1 - i;
230d793d 9946#endif
0d8e55d8 9947 }
230d793d
RS
9948
9949 op0 = XEXP (op0, 2);
5f4f0e22 9950 op1 = GEN_INT (i);
230d793d
RS
9951 const_op = i;
9952
9953 /* Result is nonzero iff shift count is equal to I. */
9954 code = reverse_condition (code);
9955 continue;
9956 }
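	  /* Example (BITS_BIG_ENDIAN clear): (eq (zero_extract
	     (const_int 4) (const_int 1) POS) 0) extracts the single
	     set bit of 4, which is bit 2, so the test becomes
	     (ne POS (const_int 2)).  */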
230d793d 9957
0f41302f 9958 /* ... fall through ... */
230d793d
RS
9959
9960 case SIGN_EXTRACT:
9961 tem = expand_compound_operation (op0);
9962 if (tem != op0)
9963 {
9964 op0 = tem;
9965 continue;
9966 }
9967 break;
9968
9969 case NOT:
9970 /* If testing for equality, we can take the NOT of the constant. */
9971 if (equality_comparison_p
9972 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9973 {
9974 op0 = XEXP (op0, 0);
9975 op1 = tem;
9976 continue;
9977 }
9978
9979 /* If just looking at the sign bit, reverse the sense of the
9980 comparison. */
9981 if (sign_bit_comparison_p)
9982 {
9983 op0 = XEXP (op0, 0);
9984 code = (code == GE ? LT : GE);
9985 continue;
9986 }
9987 break;
9988
9989 case NEG:
9990 /* If testing for equality, we can take the NEG of the constant. */
9991 if (equality_comparison_p
9992 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9993 {
9994 op0 = XEXP (op0, 0);
9995 op1 = tem;
9996 continue;
9997 }
9998
9999 /* The remaining cases only apply to comparisons with zero. */
10000 if (const_op != 0)
10001 break;
10002
10003 /* When X is ABS or is known positive,
10004 (neg X) is < 0 if and only if X != 0. */
10005
10006 if (sign_bit_comparison_p
10007 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 10008 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10009 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10010 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
10011 {
10012 op0 = XEXP (op0, 0);
10013 code = (code == LT ? NE : EQ);
10014 continue;
10015 }
10016
3bed8141 10017 /* If we have NEG of something whose two high-order bits are the
0f41302f 10018 same, we know that "(-a) < 0" is equivalent to "a > 0". */
3bed8141 10019 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
10020 {
10021 op0 = XEXP (op0, 0);
10022 code = swap_condition (code);
10023 continue;
10024 }
10025 break;
10026
10027 case ROTATE:
10028 /* If we are testing equality and our count is a constant, we
10029 can perform the inverse operation on our RHS. */
10030 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10031 && (tem = simplify_binary_operation (ROTATERT, mode,
10032 op1, XEXP (op0, 1))) != 0)
10033 {
10034 op0 = XEXP (op0, 0);
10035 op1 = tem;
10036 continue;
10037 }
10038
10039 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10040 a particular bit. Convert it to an AND of a constant of that
10041 bit. This will be converted into a ZERO_EXTRACT. */
10042 if (const_op == 0 && sign_bit_comparison_p
10043 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10044 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10045 {
5f4f0e22
CH
10046 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10047 ((HOST_WIDE_INT) 1
10048 << (mode_width - 1
10049 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10050 code = (code == LT ? NE : EQ);
10051 continue;
10052 }
10053
0f41302f 10054 /* ... fall through ... */
230d793d
RS
10055
10056 case ABS:
10057 /* ABS is ignorable inside an equality comparison with zero. */
10058 if (const_op == 0 && equality_comparison_p)
10059 {
10060 op0 = XEXP (op0, 0);
10061 continue;
10062 }
10063 break;
10064
10065
10066 case SIGN_EXTEND:
10067 /* Can simplify (compare (zero/sign_extend FOO) CONST)
10068 to (compare FOO CONST) if CONST fits in FOO's mode and we
10069 are either testing inequality or have an unsigned comparison
10070 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
10071 if (! unsigned_comparison_p
10072 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
10073 <= HOST_BITS_PER_WIDE_INT)
10074 && ((unsigned HOST_WIDE_INT) const_op
e51712db 10075 < (((unsigned HOST_WIDE_INT) 1
5f4f0e22 10076 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
10077 {
10078 op0 = XEXP (op0, 0);
10079 continue;
10080 }
10081 break;
10082
10083 case SUBREG:
a687e897 10084 /* Check for the case where we are comparing A - C1 with C2,
abc95ed3 10085 both constants are smaller than 1/2 the maximum positive
a687e897
RK
10086 value in MODE, and the comparison is equality or unsigned.
10087 In that case, if A is either zero-extended to MODE or has
10088 sufficient sign bits so that the high-order bit in MODE
10089 is a copy of the sign in the inner mode, we can prove that it is
10090 safe to do the operation in the wider mode. This simplifies
10091 many range checks. */
10092
10093 if (mode_width <= HOST_BITS_PER_WIDE_INT
10094 && subreg_lowpart_p (op0)
10095 && GET_CODE (SUBREG_REG (op0)) == PLUS
10096 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10097 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
10098 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
e51712db 10099 < (HOST_WIDE_INT)(GET_MODE_MASK (mode) / 2))
adb7a1cb 10100 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
10101 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10102 GET_MODE (SUBREG_REG (op0)))
a687e897
RK
10103 & ~ GET_MODE_MASK (mode))
10104 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10105 GET_MODE (SUBREG_REG (op0)))
10106 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10107 - GET_MODE_BITSIZE (mode)))))
10108 {
10109 op0 = SUBREG_REG (op0);
10110 continue;
10111 }
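	  /* Example: comparing (subreg:QI (plus:SI A (const_int -10)) 0)
	     with (const_int 20) for equality, where A is known to be
	     zero-extended from QImode: both constants are below 128, so
	     it is safe to compare the SImode PLUS directly.  */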
10112
fe0cf571
RK
10113 /* If the inner mode is narrower and we are extracting the low part,
10114 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10115 if (subreg_lowpart_p (op0)
89f1c7f2
RS
10116 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10117 /* Fall through */ ;
10118 else
230d793d
RS
10119 break;
10120
0f41302f 10121 /* ... fall through ... */
230d793d
RS
10122
10123 case ZERO_EXTEND:
10124 if ((unsigned_comparison_p || equality_comparison_p)
10125 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
10126 <= HOST_BITS_PER_WIDE_INT)
10127 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
10128 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10129 {
10130 op0 = XEXP (op0, 0);
10131 continue;
10132 }
10133 break;
10134
10135 case PLUS:
20fdd649 10136 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 10137 this for equality comparisons due to pathological cases involving
230d793d 10138 overflows. */
20fdd649
RK
10139 if (equality_comparison_p
10140 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10141 op1, XEXP (op0, 1))))
230d793d
RS
10142 {
10143 op0 = XEXP (op0, 0);
10144 op1 = tem;
10145 continue;
10146 }
10147
10148 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10149 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10150 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10151 {
10152 op0 = XEXP (XEXP (op0, 0), 0);
10153 code = (code == LT ? EQ : NE);
10154 continue;
10155 }
10156 break;
10157
10158 case MINUS:
20fdd649
RK
10159 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10160 (eq B (minus A C)), whichever simplifies. We can only do
10161 this for equality comparisons due to pathological cases involving
10162 overflows. */
10163 if (equality_comparison_p
10164 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10165 XEXP (op0, 1), op1)))
10166 {
10167 op0 = XEXP (op0, 0);
10168 op1 = tem;
10169 continue;
10170 }
10171
10172 if (equality_comparison_p
10173 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10174 XEXP (op0, 0), op1)))
10175 {
10176 op0 = XEXP (op0, 1);
10177 op1 = tem;
10178 continue;
10179 }
10180
230d793d
RS
10181 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10182 of bits in X minus 1, is one iff X > 0. */
10183 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10184 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10185 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10186 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10187 {
10188 op0 = XEXP (op0, 1);
10189 code = (code == GE ? LE : GT);
10190 continue;
10191 }
10192 break;
10193
10194 case XOR:
10195 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10196 if C is zero or B is a constant. */
10197 if (equality_comparison_p
10198 && 0 != (tem = simplify_binary_operation (XOR, mode,
10199 XEXP (op0, 1), op1)))
10200 {
10201 op0 = XEXP (op0, 0);
10202 op1 = tem;
10203 continue;
10204 }
10205 break;
10206
10207 case EQ: case NE:
10208 case LT: case LTU: case LE: case LEU:
10209 case GT: case GTU: case GE: case GEU:
10210 /* We can't do anything if OP0 is a condition code value, rather
10211 than an actual data value. */
10212 if (const_op != 0
10213#ifdef HAVE_cc0
10214 || XEXP (op0, 0) == cc0_rtx
10215#endif
10216 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10217 break;
10218
10219 /* Get the two operands being compared. */
10220 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10221 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10222 else
10223 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10224
10225 /* Check for the cases where we simply want the result of the
10226 earlier test or the opposite of that result. */
10227 if (code == NE
10228 || (code == EQ && reversible_comparison_p (op0))
5f4f0e22 10229 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 10230 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 10231 && (STORE_FLAG_VALUE
5f4f0e22
CH
10232 & (((HOST_WIDE_INT) 1
10233 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
230d793d
RS
10234 && (code == LT
10235 || (code == GE && reversible_comparison_p (op0)))))
10236 {
10237 code = (code == LT || code == NE
10238 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
10239 op0 = tem, op1 = tem1;
10240 continue;
10241 }
10242 break;
10243
10244 case IOR:
10245 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10246 iff X <= 0. */
10247 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10248 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10249 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10250 {
10251 op0 = XEXP (op0, 1);
10252 code = (code == GE ? GT : LE);
10253 continue;
10254 }
10255 break;
10256
10257 case AND:
10258 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10259 will be converted to a ZERO_EXTRACT later. */
10260 if (const_op == 0 && equality_comparison_p
45620ed4 10261 && GET_CODE (XEXP (op0, 0)) == ASHIFT
230d793d
RS
10262 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10263 {
10264 op0 = simplify_and_const_int
10265 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10266 XEXP (op0, 1),
10267 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 10268 (HOST_WIDE_INT) 1);
230d793d
RS
10269 continue;
10270 }
10271
10272 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10273 zero and X is a comparison and C1 and C2 describe only bits set
10274 in STORE_FLAG_VALUE, we can compare with X. */
10275 if (const_op == 0 && equality_comparison_p
5f4f0e22 10276 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
10277 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10278 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10279 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10280 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 10281 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
10282 {
10283 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10284 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10285 if ((~ STORE_FLAG_VALUE & mask) == 0
10286 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10287 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10288 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10289 {
10290 op0 = XEXP (XEXP (op0, 0), 0);
10291 continue;
10292 }
10293 }
10294
10295 /* If we are doing an equality comparison of an AND of a bit equal
10296 to the sign bit, replace this with a LT or GE comparison of
10297 the underlying value. */
10298 if (equality_comparison_p
10299 && const_op == 0
10300 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10301 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10302 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
e51712db 10303 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
10304 {
10305 op0 = XEXP (op0, 0);
10306 code = (code == EQ ? GE : LT);
10307 continue;
10308 }
10309
10310 /* If this AND operation is really a ZERO_EXTEND from a narrower
10311 mode, the constant fits within that mode, and this is either an
10312 equality or unsigned comparison, try to do this comparison in
10313 the narrower mode. */
10314 if ((equality_comparison_p || unsigned_comparison_p)
10315 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10316 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10317 & GET_MODE_MASK (mode))
10318 + 1)) >= 0
10319 && const_op >> i == 0
10320 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10321 {
10322 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10323 continue;
10324 }
e5e809f4
JL
10325
10326 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10327 in both M1 and M2 and the SUBREG is either paradoxical or
10328 represents the low part, permute the SUBREG and the AND and
10329 try again. */
10330 if (GET_CODE (XEXP (op0, 0)) == SUBREG
c5c76735 10331 && (0
9ec36da5 10332#ifdef WORD_REGISTER_OPERATIONS
c5c76735
JL
10333 || ((mode_width
10334 > (GET_MODE_BITSIZE
10335 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10336 && mode_width <= BITS_PER_WORD)
9ec36da5 10337#endif
c5c76735
JL
10338 || ((mode_width
10339 <= (GET_MODE_BITSIZE
10340 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10341 && subreg_lowpart_p (XEXP (op0, 0))))
adc05e6c
JL
10342#ifndef WORD_REGISTER_OPERATIONS
10343 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10344 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10345 As originally written the upper bits have a defined value
10346 due to the AND operation. However, if we commute the AND
10347 inside the SUBREG then they no longer have defined values
10348 and the meaning of the code has been changed. */
10349 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10350 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10351#endif
e5e809f4
JL
10352 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10353 && mode_width <= HOST_BITS_PER_WIDE_INT
10354 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10355 <= HOST_BITS_PER_WIDE_INT)
10356 && (INTVAL (XEXP (op0, 1)) & ~ mask) == 0
10357 && 0 == (~ GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
9ec36da5 10358 & INTVAL (XEXP (op0, 1)))
e51712db
KG
10359 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10360 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
9ec36da5 10361 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
e5e809f4
JL
10362
10363 {
10364 op0
10365 = gen_lowpart_for_combine
10366 (mode,
10367 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10368 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10369 continue;
10370 }
10371
230d793d
RS
10372 break;
10373
	case ASHIFT:
	  /* If we have (compare (ashift FOO N) (const_int C)) and
	     the high order N bits of FOO (N+1 if an inequality comparison)
	     are known to be zero, we can do this by comparing FOO with C
	     shifted right N bits so long as the low-order N bits of C are
	     zero.  */
	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && INTVAL (XEXP (op0, 1)) >= 0
	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
		  < HOST_BITS_PER_WIDE_INT)
	      && ((const_op
		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && (nonzero_bits (XEXP (op0, 0), mode)
		  & ~ (mask >> (INTVAL (XEXP (op0, 1))
				+ ! equality_comparison_p))) == 0)
	    {
	      /* We must perform a logical shift, not an arithmetic one,
		 as we want the top N bits of C to be zero.  */
	      unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);

	      temp >>= INTVAL (XEXP (op0, 1));
	      op1 = GEN_INT (trunc_int_for_mode (temp, mode));
	      op0 = XEXP (op0, 0);
	      continue;
	    }

	  /* If we are doing a sign bit comparison, it means we are testing
	     a particular bit.  Convert it to the appropriate AND.  */
	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && mode_width <= HOST_BITS_PER_WIDE_INT)
	    {
	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
					    ((HOST_WIDE_INT) 1
					     << (mode_width - 1
						 - INTVAL (XEXP (op0, 1)))));
	      code = (code == LT ? NE : EQ);
	      continue;
	    }

	  /* If this is an equality comparison with zero and we are shifting
	     the low bit to the sign bit, we can convert this to an AND of the
	     low-order bit.  */
	  if (const_op == 0 && equality_comparison_p
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
	    {
	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
					    (HOST_WIDE_INT) 1);
	      continue;
	    }
	  break;

	case ASHIFTRT:
	  /* If this is an equality comparison with zero, we can do this
	     as a logical shift, which might be much simpler.  */
	  if (equality_comparison_p && const_op == 0
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    {
	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
					  XEXP (op0, 0),
					  INTVAL (XEXP (op0, 1)));
	      continue;
	    }

	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
	     do the comparison in a narrower mode.  */
	  if (! unsigned_comparison_p
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
					 MODE_INT, 1)) != BLKmode
	      && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
		  || ((unsigned HOST_WIDE_INT) - const_op
		      <= GET_MODE_MASK (tmode))))
	    {
	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
	      continue;
	    }

	  /* ... fall through ...  */
	case LSHIFTRT:
	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
	     the low order N bits of FOO are known to be zero, we can do this
	     by comparing FOO with C shifted left N bits so long as no
	     overflow occurs.  */
	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && INTVAL (XEXP (op0, 1)) >= 0
	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && (nonzero_bits (XEXP (op0, 0), mode)
		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
	      && (const_op == 0
		  || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
		      < mode_width)))
	    {
	      const_op <<= INTVAL (XEXP (op0, 1));
	      op1 = GEN_INT (const_op);
	      op0 = XEXP (op0, 0);
	      continue;
	    }

	  /* If we are using this shift to extract just the sign bit, we
	     can replace this with an LT or GE comparison.  */
	  if (const_op == 0
	      && (equality_comparison_p || sign_bit_comparison_p)
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
	    {
	      op0 = XEXP (op0, 0);
	      code = (code == NE || code == GT ? LT : GE);
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* Now make any compound operations involved in this comparison.  Then,
     check for an outermost SUBREG on OP0 that is not doing anything or is
     paradoxical.  The latter case can only occur when it is known that the
     "extra" bits will be zero.  Therefore, it is safe to remove the SUBREG.
     We can never remove a SUBREG for a non-equality comparison because the
     sign bit is in a different place in the underlying object.  */

  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
  op1 = make_compound_operation (op1, SET);

  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
      && (code == NE || code == EQ)
      && ((GET_MODE_SIZE (GET_MODE (op0))
	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
    {
      op0 = SUBREG_REG (op0);
      op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
    }

  else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
	   && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
	   && (code == NE || code == EQ)
	   && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
	       <= HOST_BITS_PER_WIDE_INT)
	   && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
	       & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
	   && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
					      op1),
	       (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
		& ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
    op0 = SUBREG_REG (op0), op1 = tem;

  /* We now do the opposite procedure: some machines don't have compare
     insns in all modes.  If OP0's mode is an integer mode smaller than a
     word and we can't do a compare in that mode, see if there is a larger
     mode for which we can do the compare.  There are a number of cases in
     which we can use the wider mode.  */

  mode = GET_MODE (op0);
  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
      && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    for (tmode = GET_MODE_WIDER_MODE (mode);
	 (tmode != VOIDmode
	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
	 tmode = GET_MODE_WIDER_MODE (tmode))
      if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
	{
	  /* If the only nonzero bits in OP0 and OP1 are those in the
	     narrower mode and this is an equality or unsigned comparison,
	     we can use the wider mode.  Similarly for sign-extended
	     values, in which case it is true for all comparisons.  */
	  if (((code == EQ || code == NE
		|| code == GEU || code == GTU || code == LEU || code == LTU)
	       && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
	       && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
	      || ((num_sign_bit_copies (op0, tmode)
		   > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
		  && (num_sign_bit_copies (op1, tmode)
		      > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
	    {
	      op0 = gen_lowpart_for_combine (tmode, op0);
	      op1 = gen_lowpart_for_combine (tmode, op1);
	      break;
	    }

	  /* If this is a test for negative, we can make an explicit
	     test of the sign bit.  */

	  if (op1 == const0_rtx && (code == LT || code == GE)
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	    {
	      op0 = gen_binary (AND, tmode,
				gen_lowpart_for_combine (tmode, op0),
				GEN_INT ((HOST_WIDE_INT) 1
					 << (GET_MODE_BITSIZE (mode) - 1)));
	      code = (code == LT) ? NE : EQ;
	      break;
	    }
	}

#ifdef CANONICALIZE_COMPARISON
  /* If this machine only supports a subset of valid comparisons, see if we
     can convert an unsupported one into a supported one.  */
  CANONICALIZE_COMPARISON (code, op0, op1);
#endif

  *pop0 = op0;
  *pop1 = op1;

  return code;
}
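
/* Illustrative sketch (not part of combine.c): source-level analogues of
   the shift-vs-zero comparison rewrites handled above, for a hypothetical
   32-bit word.  Only standard C is used, and the shifts are done on the
   unsigned representation so every operation is well defined.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t x = -123;
  unsigned n = 5;

  /* (lshiftrt X 31) != 0 is the same test as X < 0: the shift extracts
     just the sign bit, so the LT/GE rewrite applies.  */
  assert ((((uint32_t) x >> 31) != 0) == (x < 0));

  /* (ashift X 31) != 0 is the same test as (X & 1) != 0: shifting the
     low bit into the sign position, rewritten as an AND of bit 0.  */
  assert (((((uint32_t) x << 31) != 0)) == ((x & 1) != 0));

  /* A sign-bit comparison of (ashift X N) tests bit (31 - N) of X,
     which is the AND form produced by simplify_and_const_int.  */
  assert (((((uint32_t) x << n) >> 31) & 1)
	  == ((x & ((uint32_t) 1 << (31 - n))) != 0));

  return 0;
}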
\f
/* Return 1 if we know that X, a comparison operation, is not operating
   on a floating-point value or is EQ or NE, meaning that we can safely
   reverse it.  */

static int
reversible_comparison_p (x)
     rtx x;
{
  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      || flag_fast_math
      || GET_CODE (x) == NE || GET_CODE (x) == EQ)
    return 1;

  switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
    case MODE_COMPLEX_INT:
      return 1;

    case MODE_CC:
      /* If the mode of the condition codes tells us that this is safe,
	 we need look no further.  */
      if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
	return 1;

      /* Otherwise try and find where the condition codes were last set and
	 use that.  */
      x = get_last_value (XEXP (x, 0));
      return (x && GET_CODE (x) == COMPARE
	      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));

    default:
      return 0;
    }
}
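
/* Illustrative sketch (not part of combine.c): why IEEE floating-point
   comparisons other than EQ and NE cannot simply be reversed.  With a NaN
   operand both `a < b' and `a >= b' are false, so rewriting `!(a < b)'
   as `a >= b' would change the result.  Standard C99 only.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double a = nan (""), b = 1.0;

  /* Both comparisons are false: reversing LT to GE would be wrong.  */
  assert (! (a < b));
  assert (! (a >= b));

  /* EQ/NE remain reversible: exactly one of the two is true.  */
  assert ((a == b) != (a != b));
  return 0;
}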
\f
/* Utility function for following routine.  Called when X is part of a value
   being stored into reg_last_set_value.  Sets reg_last_set_table_tick
   for each register mentioned.  Similar to mention_regs in cse.c.  */

static void
update_table_tick (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  register const char *fmt = GET_RTX_FORMAT (code);
  register int i;

  if (code == REG)
    {
      int regno = REGNO (x);
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

      for (i = regno; i < endregno; i++)
	reg_last_set_table_tick[i] = label_tick;

      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    /* Note that we can't have an "E" in values stored; see
       get_last_value_validate.  */
    if (fmt[i] == 'e')
      update_table_tick (XEXP (x, i));
}
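
/* Illustrative sketch (not part of combine.c): the format-string-driven
   walk used by update_table_tick and several routines below.  Each rtx
   code has a format string in which 'e' means "sub-expression", so the
   walk recurses on exactly those operands.  The toy node type here is a
   hypothetical stand-in for rtx.  */
#include <stdio.h>

struct node
{
  const char *fmt;		/* e.g. "ee" for a binary operator.  */
  struct node *op[2];		/* sub-expressions for the 'e' slots.  */
  int regno;			/* valid when fmt is "r" (a leaf reg).  */
};

static void
walk (struct node *x)
{
  int i;

  if (x->fmt[0] == 'r')
    {
      printf ("reg %d\n", x->regno);	/* leaf: record the register.  */
      return;
    }

  for (i = 0; x->fmt[i]; i++)
    if (x->fmt[i] == 'e')		/* recurse only on expressions  */
      walk (x->op[i]);
}

int
main (void)
{
  struct node r1 = { "r", { 0, 0 }, 1 };
  struct node r2 = { "r", { 0, 0 }, 2 };
  struct node plus = { "ee", { &r1, &r2 }, 0 };

  walk (&plus);				/* prints "reg 1" then "reg 2" */
  return 0;
}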

/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
   are saying that the register is clobbered and we no longer know its
   value.  If INSN is zero, don't update reg_last_set; this is only permitted
   with VALUE also zero and is used to invalidate the register.  */

static void
record_value_for_reg (reg, insn, value)
     rtx reg;
     rtx insn;
     rtx value;
{
  int regno = REGNO (reg);
  int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
  int i;

  /* If VALUE contains REG and we have a previous value for REG, substitute
     the previous value.  */
  if (value && insn && reg_overlap_mentioned_p (reg, value))
    {
      rtx tem;

      /* Set things up so get_last_value is allowed to see anything set up to
	 our insn.  */
      subst_low_cuid = INSN_CUID (insn);
      tem = get_last_value (reg);

      if (tem)
	value = replace_rtx (copy_rtx (value), reg, tem);
    }

  /* For each register modified, show we don't know its value, that
     we don't know about its bitwise content, that its value has been
     updated, and that we don't know the location of the death of the
     register.  */
  for (i = regno; i < endregno; i++)
    {
      if (insn)
	reg_last_set[i] = insn;
      reg_last_set_value[i] = 0;
      reg_last_set_mode[i] = 0;
      reg_last_set_nonzero_bits[i] = 0;
      reg_last_set_sign_bit_copies[i] = 0;
      reg_last_death[i] = 0;
    }

  /* Mark registers that are being referenced in this value.  */
  if (value)
    update_table_tick (value);

  /* Now update the status of each register being set.
     If someone is using this register in this block, set this register
     to invalid since we will get confused between the two lives in this
     basic block.  This makes using this register always invalid.  In cse, we
     scan the table to invalidate all entries using this register, but this
     is too much work for us.  */

  for (i = regno; i < endregno; i++)
    {
      reg_last_set_label[i] = label_tick;
      if (value && reg_last_set_table_tick[i] == label_tick)
	reg_last_set_invalid[i] = 1;
      else
	reg_last_set_invalid[i] = 0;
    }

  /* The value being assigned might refer to X (like in "x++;").  In that
     case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
  if (value && ! get_last_value_validate (&value, insn,
					  reg_last_set_label[regno], 0))
    {
      value = copy_rtx (value);
      if (! get_last_value_validate (&value, insn,
				     reg_last_set_label[regno], 1))
	value = 0;
    }

  /* For the main register being modified, update the value, the mode, the
     nonzero bits, and the number of sign bit copies.  */

  reg_last_set_value[regno] = value;

  if (value)
    {
      subst_low_cuid = INSN_CUID (insn);
      reg_last_set_mode[regno] = GET_MODE (reg);
      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
      reg_last_set_sign_bit_copies[regno]
	= num_sign_bit_copies (value, GET_MODE (reg));
    }
}
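
/* Illustrative sketch (not part of combine.c): why a recorded value that
   mentions the register it describes (as in "x++;") must be rewritten
   before being stored.  Naively expanding "r1 = r1 + 1" by looking r1 up
   again would never terminate; folding in the previously recorded value
   first, or giving up with a clobber marker as the code above does,
   bounds the expansion.  The table and names here are hypothetical.  */
#include <stdio.h>

#define UNKNOWN (-1)

static int last_value[4] = { UNKNOWN, 5, UNKNOWN, UNKNOWN };	/* r1 == 5 */

int
main (void)
{
  /* Record "r1 = r1 + 1": substitute the old value of r1 first, the way
     record_value_for_reg uses get_last_value/replace_rtx.  */
  int old = last_value[1];

  if (old != UNKNOWN)
    last_value[1] = old + 1;	/* record the constant 6, not "r1 + 1" */
  else
    last_value[1] = UNKNOWN;	/* give up, like the CLOBBER case */

  printf ("r1 -> %d\n", last_value[1]);
  return 0;
}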

/* Used for communication between the following two routines.  */
static rtx record_dead_insn;

/* Called via note_stores from record_dead_and_set_regs to handle one
   SET or CLOBBER in an insn.  */

static void
record_dead_and_set_regs_1 (dest, setter)
     rtx dest, setter;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    {
      /* If we are setting the whole register, we know its value.  Otherwise
	 show that we don't know the value.  We can handle SUBREG in
	 some cases.  */
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
      else if (GET_CODE (setter) == SET
	       && GET_CODE (SET_DEST (setter)) == SUBREG
	       && SUBREG_REG (SET_DEST (setter)) == dest
	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
	       && subreg_lowpart_p (SET_DEST (setter)))
	record_value_for_reg (dest, record_dead_insn,
			      gen_lowpart_for_combine (GET_MODE (dest),
						       SET_SRC (setter)));
      else
	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
    }
  else if (GET_CODE (dest) == MEM
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    mem_last_set = INSN_CUID (record_dead_insn);
}

/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
   and also the similar information mem_last_set (which insn most recently
   modified memory) and last_call_cuid (which insn was the most recent
   subroutine call).  */

static void
record_dead_and_set_regs (insn)
     rtx insn;
{
  register rtx link;
  int i;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD
	  && GET_CODE (XEXP (link, 0)) == REG)
	{
	  int regno = REGNO (XEXP (link, 0));
	  int endregno
	    = regno + (regno < FIRST_PSEUDO_REGISTER
		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
		       : 1);

	  for (i = regno; i < endregno; i++)
	    reg_last_death[i] = insn;
	}
      else if (REG_NOTE_KIND (link) == REG_INC)
	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
    }

  if (GET_CODE (insn) == CALL_INSN)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i])
	  {
	    reg_last_set_value[i] = 0;
	    reg_last_set_mode[i] = 0;
	    reg_last_set_nonzero_bits[i] = 0;
	    reg_last_set_sign_bit_copies[i] = 0;
	    reg_last_death[i] = 0;
	  }

      last_call_cuid = mem_last_set = INSN_CUID (insn);
    }

  record_dead_insn = insn;
  note_stores (PATTERN (insn), record_dead_and_set_regs_1);
}
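
/* Illustrative sketch (not part of combine.c): the regno/endregno idiom
   used throughout these routines.  A hard register holding a multi-word
   value occupies several consecutive register numbers, so the per-register
   tables must be updated over the whole range.  The 32-bit word size and
   the nregs helper are hypothetical stand-ins for HARD_REGNO_NREGS.  */
#include <stdio.h>

#define WORD_BITS 32

/* How many consecutive hard regs a value of MODE_BITS bits occupies.  */
static int
nregs (int mode_bits)
{
  return (mode_bits + WORD_BITS - 1) / WORD_BITS;
}

int
main (void)
{
  int regno = 4;
  int endregno = regno + nregs (64);	/* a 64-bit value: regs 4 and 5 */
  int i;

  for (i = regno; i < endregno; i++)
    printf ("invalidate reg %d\n", i);
  return 0;
}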
\f
/* Utility routine for the following function.  Verify that all the registers
   mentioned in *LOC are valid when *LOC was part of a value set when
   label_tick == TICK.  Return 0 if some are not.

   If REPLACE is non-zero, replace the invalid reference with
   (clobber (const_int 0)) and return 1.  This replacement is useful because
   we often can get useful information about the form of a value (e.g., if
   it was produced by a shift that always produces -1 or 0) even though
   we don't know exactly what registers it was produced from.  */

static int
get_last_value_validate (loc, insn, tick, replace)
     rtx *loc;
     rtx insn;
     int tick;
     int replace;
{
  rtx x = *loc;
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
      int j;

      for (j = regno; j < endregno; j++)
	if (reg_last_set_invalid[j]
	    /* If this is a pseudo-register that was only set once and not
	       live at the beginning of the function, it is always valid.  */
	    || (! (regno >= FIRST_PSEUDO_REGISTER
		   && REG_N_SETS (regno) == 1
		   && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
					 regno))
		&& reg_last_set_label[j] > tick))
	  {
	    if (replace)
	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	    return replace;
	  }

      return 1;
    }
  /* If this is a memory reference, make sure that there were
     no stores after it that might have clobbered the value.  We don't
     have alias info, so we assume any store invalidates it.  */
  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
	   && INSN_CUID (insn) <= mem_last_set)
    {
      if (replace)
	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      return replace;
    }

  for (i = 0; i < len; i++)
    if ((fmt[i] == 'e'
	 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
	/* Don't bother with these.  They shouldn't occur anyway.  */
	|| fmt[i] == 'E')
      return 0;

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}
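
/* Illustrative sketch (not part of combine.c): why replacing an invalid
   register with a CLOBBER can still leave useful information, per the
   comment above.  An arithmetic right shift by the full width minus one
   yields only 0 or -1, whatever the shifted register held, so knowing
   the *form* of the value is enough to know every bit is a sign copy.
   The helper is hypothetical; the conditional keeps the C well defined
   for negative operands.  */
#include <assert.h>
#include <stdint.h>

static int32_t
sign_fill (int32_t x)
{
  /* Portable arithmetic-shift-right by 31: 0 for x >= 0, -1 for x < 0.  */
  return x < 0 ? -1 : 0;
}

int
main (void)
{
  assert (sign_fill (12345) == 0);
  assert (sign_fill (-7) == -1);
  return 0;
}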

/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (x)
     rtx x;
{
  int regno;
  rtx value;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart_for_combine (GET_MODE (x), value);

  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);
  value = reg_last_set_value[regno];

  /* If we don't have a value, or if it isn't for this basic block and
     it's either a hard register, set more than once, or live at the
     beginning of the function, return 0.

     Because if it's not live at the beginning of the function then the reg
     is always set before being used (is never used without being set).
     And, if it's set only once, and it's always set before use, then all
     uses must have the same last value, even if it's not from this basic
     block.  */

  if (value == 0
      || (reg_last_set_label[regno] != label_tick
	  && (regno < FIRST_PSEUDO_REGISTER
	      || REG_N_SETS (regno) != 1
	      || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
				  regno))))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once, but make a quick
     check to see if the previous insn set it to something.  This is commonly
     the case when the same pseudo is used by repeated insns.

     This does not work if there exists an instruction which is temporarily
     not on the insn chain.  */

  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
    {
      rtx insn, set;

      /* We can't do anything if the value is set in between the insns we are
	 processing.  */
      if (INSN_CUID (reg_last_set[regno]) <= INSN_CUID (subst_insn))
	return 0;

      /* We can not do anything useful in this case, because there is
	 an instruction which is not on the insn chain.  */
      if (subst_prev_insn)
	return 0;

      /* Skip over USE insns.  They are not useful here, and they may have
	 been made by combine, in which case they do not have an INSN_CUID
	 value.  We can't use prev_real_insn, because that would incorrectly
	 take us backwards across labels.  Skip over BARRIERs also, since
	 they could have been made by combine.  If we see one, we must be
	 optimizing dead code, so it doesn't matter what we do.  */
      for (insn = prev_nonnote_insn (subst_insn);
	   insn && ((GET_CODE (insn) == INSN
		     && GET_CODE (PATTERN (insn)) == USE)
		    || GET_CODE (insn) == BARRIER
		    || INSN_CUID (insn) >= subst_low_cuid);
	   insn = prev_nonnote_insn (insn))
	;

      if (insn
	  && (set = single_set (insn)) != 0
	  && rtx_equal_p (SET_DEST (set), x))
	{
	  value = SET_SRC (set);

	  /* Make sure that VALUE doesn't reference X.  Replace any
	     explicit references with a CLOBBER.  If there are any remaining
	     references (rare), don't use the value.  */
	  if (reg_mentioned_p (x, value))
	    value = replace_rtx (copy_rtx (value), x,
				 gen_rtx_CLOBBER (GET_MODE (x), const0_rtx));

	  if (reg_overlap_mentioned_p (x, value))
	    return 0;
	}
      else
	return 0;
    }

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, reg_last_set[regno],
			       reg_last_set_label[regno], 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, reg_last_set[regno],
			       reg_last_set_label[regno], 1))
    return value;

  return 0;
}
\f
/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_CUID.  */

static int
use_crosses_set_p (x, from_cuid)
     register rtx x;
     int from_cuid;
{
  register const char *fmt;
  register int i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
			    ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

#ifdef PUSH_ROUNDING
      /* Don't allow uses of the stack pointer to be moved,
	 because we don't know whether the move crosses a push insn.  */
      if (regno == STACK_POINTER_REGNUM)
	return 1;
#endif
      for (; regno < endreg; regno++)
	if (reg_last_set[regno]
	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
	  return 1;
      return 0;
    }

  if (code == MEM && mem_last_set > from_cuid)
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
	      return 1;
	}
      else if (fmt[i] == 'e'
	       && use_crosses_set_p (XEXP (x, i), from_cuid))
	return 1;
    }
  return 0;
}
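
/* Illustrative sketch (not part of combine.c): the hazard that
   use_crosses_set_p guards against.  Substituting the expression that
   computed `a' into the later use would read the *new* value of `b',
   changing the program's meaning.  Standard C only.  */
#include <assert.h>

int
main (void)
{
  int b = 1;
  int a = b + 2;	/* insn A: a := b + 2 */
  int use;

  b = 10;		/* an intervening set of b */
  use = a;		/* the use we would combine "b + 2" into */

  assert (use == 3);		/* correct: uses the old value of b */
  assert (b + 2 == 12);		/* what the invalid move would compute */
  return 0;
}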
\f
/* Define three variables used for communication between the following
   routines.  */

static int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;

/* Function called via note_stores from reg_dead_at_p.

   If DEST is within [reg_dead_regno, reg_dead_endregno), set
   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */

static void
reg_dead_at_p_1 (dest, x)
     rtx dest;
     rtx x;
{
  int regno, endregno;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);
  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);

  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
}

/* Return non-zero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (reg, insn)
     rtx reg;
     rtx insn;
{
  int block, i;

  /* Set variables for reg_dead_at_p_1.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
					? HARD_REGNO_NREGS (reg_dead_regno,
							    GET_MODE (reg))
					: 1);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
	  return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  */
  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1);
      if (reg_dead_flag)
	return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
	return 1;
    }

  /* Get the basic block number that we were in.  */
  if (insn == 0)
    block = 0;
  else
    {
      for (block = 0; block < n_basic_blocks; block++)
	if (insn == BLOCK_HEAD (block))
	  break;

      if (block == n_basic_blocks)
	return 0;
    }

  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
      return 0;

  return 1;
}
\f
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.  */

static void
mark_used_regs_combine (x)
     rtx x;
{
  register RTX_CODE code = GET_CODE (x);
  register int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  /* None of this applies to the stack, frame or arg pointers.  */
	  if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  i = HARD_REGNO_NREGS (regno, GET_MODE (x));
	  while (i-- > 0)
	    SET_HARD_REG_BIT (newpat_used_regs, regno + i);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  */
	register rtx testreg = SET_DEST (x);

	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (GET_CODE (testreg) == MEM)
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    register int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}
\f
/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (regno, insn)
     int regno;
     rtx insn;
{
  register rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    {
      REG_N_DEATHS (regno)--;
      remove_note (insn, note);
    }

  return note;
}

/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
     rtx x;
     rtx maybe_kill_insn;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register const char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && ! reg_referenced_p (x, maybe_kill_insn))
	return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);
      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  > GET_MODE_SIZE (GET_MODE (x))))
	    {
	      int deadregno = REGNO (XEXP (note, 0));
	      int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;

	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx_EXPR_LIST (REG_DEAD,
					 gen_rtx_REG (reg_raw_mode[i], i),
					 REG_NOTES (where_dead));
	    }
	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
			    < GET_MODE_SIZE (GET_MODE (x)))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i, offset;
	      rtx oldnotes = 0;

	      if (note)
		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
	      else
		offset = 1;

	      for (i = regno + offset; i < ourend; i++)
		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

	  REG_N_DEATHS (regno)++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some
	 piece of every register in the expression is used by
	 this insn, so remove any old death.  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
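
/* Illustrative sketch (not part of combine.c): the "same number of words"
   test applied to SUBREGs above, i.e. a ceiling division by the word
   size.  The 4-byte WORD and the helper are hypothetical stand-ins for
   UNITS_PER_WORD and the inline expressions.  */
#include <assert.h>

#define WORD 4

/* How many words a value of BYTES bytes occupies (ceiling division).  */
static int
words (int bytes)
{
  return (bytes + WORD - 1) / WORD;
}

int
main (void)
{
  /* A 4-byte SUBREG of an 8-byte value covers fewer words than the
     whole value, so it does not count as using every register...  */
  assert (words (4) != words (8));

  /* ...but a 2-byte SUBREG of a 4-byte value spans the same single
     word, so some piece of every register is used.  */
  assert (words (2) == words (4));
  return 0;
}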
\f
/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (x, body)
     rtx x;
     rtx body;
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
	target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
	target = SUBREG_REG (XEXP (dest, 0));
      else
	return 0;

      if (GET_CODE (target) == SUBREG)
	target = SUBREG_REG (target);

      if (GET_CODE (target) != REG)
	return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
	return target == x;

      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
	return 1;

  return 0;
}
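
/* Illustrative sketch (not part of combine.c): the half-open interval
   overlap test that reg_bitfield_target_p ends with.  Two hard-reg
   ranges [r1, e1) and [r2, e2) overlap exactly when e1 > r2 && r1 < e2.
   The helper name is hypothetical.  */
#include <assert.h>

static int
overlap (int r1, int e1, int r2, int e2)
{
  return e1 > r2 && r1 < e2;
}

int
main (void)
{
  assert (overlap (4, 6, 5, 7));	/* regs 4-5 vs 5-6: share reg 5 */
  assert (! overlap (4, 6, 6, 8));	/* adjacent ranges are disjoint */
  return 0;
}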
11517\f
11518/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
11519 as appropriate. I3 and I2 are the insns resulting from the combination
11520 insns including FROM (I2 may be zero).
11521
11522 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
11523 not need REG_DEAD notes because they are being substituted for. This
11524 saves searching in the most common cases.
11525
11526 Each note in the list is either ignored or placed on some insns, depending
11527 on the type of note. */
11528
11529static void
11530distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
11531 rtx notes;
11532 rtx from_insn;
11533 rtx i3, i2;
11534 rtx elim_i2, elim_i1;
11535{
11536 rtx note, next_note;
11537 rtx tem;
11538
11539 for (note = notes; note; note = next_note)
11540 {
11541 rtx place = 0, place2 = 0;
11542
11543 /* If this NOTE references a pseudo register, ensure it references
11544 the latest copy of that register. */
11545 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
11546 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
11547 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
11548
11549 next_note = XEXP (note, 1);
11550 switch (REG_NOTE_KIND (note))
11551 {
c9903b44
DE
11552 case REG_BR_PROB:
11553 case REG_EXEC_COUNT:
11554 /* Doesn't matter much where we put this, as long as it's somewhere.
11555 It is preferable to keep these notes on branches, which is most
11556 likely to be i3. */
11557 place = i3;
11558 break;
11559
4b7c585f 11560 case REG_EH_REGION:
0e403ec3
AS
11561 case REG_EH_RETHROW:
11562 /* These notes must remain with the call. It should not be
11563 possible for both I2 and I3 to be a call. */
4b7c585f
JL
11564 if (GET_CODE (i3) == CALL_INSN)
11565 place = i3;
11566 else if (i2 && GET_CODE (i2) == CALL_INSN)
11567 place = i2;
11568 else
11569 abort ();
11570 break;
11571
230d793d 11572 case REG_UNUSED:
07d0cbdd 11573 /* Any clobbers for i3 may still exist, and so we must process
176c9e6b
JW
11574 REG_UNUSED notes from that insn.
11575
11576 Any clobbers from i2 or i1 can only exist if they were added by
11577 recog_for_combine. In that case, recog_for_combine created the
11578 necessary REG_UNUSED notes. Trying to keep any original
11579 REG_UNUSED notes from these insns can cause incorrect output
11580 if it is for the same register as the original i3 dest.
11581 In that case, we will notice that the register is set in i3,
11582 and then add a REG_UNUSED note for the destination of i3, which
07d0cbdd
JW
11583 is wrong. However, it is possible to have REG_UNUSED notes from
11584 i2 or i1 for register which were both used and clobbered, so
11585 we keep notes from i2 or i1 if they will turn into REG_DEAD
11586 notes. */
176c9e6b 11587
230d793d
RS
11588 /* If this register is set or clobbered in I3, put the note there
11589 unless there is one already. */
07d0cbdd 11590 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
230d793d 11591 {
07d0cbdd
JW
11592 if (from_insn != i3)
11593 break;
11594
230d793d
RS
11595 if (! (GET_CODE (XEXP (note, 0)) == REG
11596 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
11597 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
11598 place = i3;
11599 }
11600 /* Otherwise, if this register is used by I3, then this register
11601 now dies here, so we must put a REG_DEAD note here unless there
11602 is one already. */
11603 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
11604 && ! (GET_CODE (XEXP (note, 0)) == REG
11605 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
11606 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
11607 {
11608 PUT_REG_NOTE_KIND (note, REG_DEAD);
11609 place = i3;
11610 }
11611 break;
11612
11613 case REG_EQUAL:
11614 case REG_EQUIV:
11615 case REG_NONNEG:
9ae8ffe7 11616 case REG_NOALIAS:
230d793d
RS
11617 /* These notes say something about results of an insn. We can
11618 only support them if they used to be on I3 in which case they
a687e897
RK
11619 remain on I3. Otherwise they are ignored.
11620
11621 If the note refers to an expression that is not a constant, we
11622 must also ignore the note since we cannot tell whether the
11623 equivalence is still true. It might be possible to do
11624 slightly better than this (we only have a problem if I2DEST
11625 or I1DEST is present in the expression), but it doesn't
11626 seem worth the trouble. */
11627
11628 if (from_insn == i3
11629 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
230d793d
RS
11630 place = i3;
11631 break;
11632
11633 case REG_INC:
11634 case REG_NO_CONFLICT:
230d793d
RS
11635 /* These notes say something about how a register is used. They must
11636 be present on any use of the register in I2 or I3. */
11637 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
11638 place = i3;
11639
11640 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
11641 {
11642 if (place)
11643 place2 = i2;
11644 else
11645 place = i2;
11646 }
11647 break;
11648
e55b4486
RH
11649 case REG_LABEL:
11650 /* This can show up in several ways -- either directly in the
11651 pattern, or hidden off in the constant pool with (or without?)
11652 a REG_EQUAL note. */
11653 /* ??? Ignore the without-reg_equal-note problem for now. */
11654 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
11655 || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
11656 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
11657 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
11658 place = i3;
11659
11660 if (i2
11661 && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
11662 || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
11663 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
11664 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
11665 {
11666 if (place)
11667 place2 = i2;
11668 else
11669 place = i2;
11670 }
11671 break;
11672
230d793d
RS
11673 case REG_WAS_0:
11674 /* It is too much trouble to try to see if this note is still
11675 correct in all situations. It is better to simply delete it. */
11676 break;
11677
11678 case REG_RETVAL:
11679 /* If the insn previously containing this note still exists,
11680 put it back where it was. Otherwise move it to the previous
11681 insn. Adjust the corresponding REG_LIBCALL note. */
11682 if (GET_CODE (from_insn) != NOTE)
11683 place = from_insn;
11684 else
11685 {
5f4f0e22 11686 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
230d793d
RS
11687 place = prev_real_insn (from_insn);
11688 if (tem && place)
11689 XEXP (tem, 0) = place;
11690 }
11691 break;
11692
11693 case REG_LIBCALL:
11694 /* This is handled similarly to REG_RETVAL. */
11695 if (GET_CODE (from_insn) != NOTE)
11696 place = from_insn;
11697 else
11698 {
5f4f0e22 11699 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
230d793d
RS
11700 place = next_real_insn (from_insn);
11701 if (tem && place)
11702 XEXP (tem, 0) = place;
11703 }
11704 break;
11705
11706 case REG_DEAD:
11707 /* If the register is used as an input in I3, it dies there.
11708 Similarly for I2, if it is non-zero and adjacent to I3.
11709
11710 If the register is not used as an input in either I3 or I2
11711 and it is not one of the registers we were supposed to eliminate,
11712 there are two possibilities. We might have a non-adjacent I2
11713 or we might have somehow eliminated an additional register
11714 from a computation. For example, we might have had A & B where
11715 we discover that B will always be zero. In this case we will
11716 eliminate the reference to A.
11717
11718 In both cases, we must search to see if we can find a previous
11719 use of A and put the death note there. */
11720
6e2d1486
RK
11721 if (from_insn
11722 && GET_CODE (from_insn) == CALL_INSN
11723 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
11724 place = from_insn;
11725 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
230d793d
RS
11726 place = i3;
11727 else if (i2 != 0 && next_nonnote_insn (i2) == i3
11728 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11729 place = i2;
11730
11731 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
11732 break;
11733
510dd77e
RK
11734 /* If the register is used in both I2 and I3 and it dies in I3,
11735 we might have added another reference to it. If reg_n_refs
11736 was 2, bump it to 3. This has to be correct since the
11737 register must have been set somewhere. The reason this is
11738 done is because local-alloc.c treats 2 references as a
11739 special case. */
11740
11741 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
b1f21e0a 11742 && REG_N_REFS (REGNO (XEXP (note, 0)))== 2
510dd77e 11743 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
b1f21e0a 11744 REG_N_REFS (REGNO (XEXP (note, 0))) = 3;
510dd77e 11745
230d793d 11746 if (place == 0)
38d8473f 11747 {
d3a923ee
RH
11748 basic_block bb = BASIC_BLOCK (this_basic_block);
11749
11750 for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
38d8473f 11751 {
d3a923ee
RH
11752 if (GET_RTX_CLASS (GET_CODE (tem)) != 'i')
11753 {
11754 if (tem == bb->head)
11755 break;
11756 continue;
11757 }
11758
38d8473f
RK
11759 /* If the register is being set at TEM, see if that is all
11760 TEM is doing. If so, delete TEM. Otherwise, make this
11761 into a REG_UNUSED note instead. */
11762 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
11763 {
11764 rtx set = single_set (tem);
e5e809f4 11765 rtx inner_dest = 0;
e51712db 11766#ifdef HAVE_cc0
f5c97640 11767 rtx cc0_setter = NULL_RTX;
e51712db 11768#endif
e5e809f4
JL
11769
11770 if (set != 0)
11771 for (inner_dest = SET_DEST (set);
11772 GET_CODE (inner_dest) == STRICT_LOW_PART
d3a923ee
RH
11773 || GET_CODE (inner_dest) == SUBREG
11774 || GET_CODE (inner_dest) == ZERO_EXTRACT;
e5e809f4
JL
11775 inner_dest = XEXP (inner_dest, 0))
11776 ;
38d8473f
RK
11777
11778 /* Verify that it was the set, and not a clobber that
f5c97640
RH
11779 modified the register.
11780
11781 CC0 targets must be careful to maintain setter/user
11782 pairs. If we cannot delete the setter due to side
11783 effects, mark the user with an UNUSED note instead
11784 of deleting it. */
38d8473f
RK
11785
11786 if (set != 0 && ! side_effects_p (SET_SRC (set))
f5c97640
RH
11787 && rtx_equal_p (XEXP (note, 0), inner_dest)
11788#ifdef HAVE_cc0
11789 && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
11790 || ((cc0_setter = prev_cc0_setter (tem)) != NULL
11791 && sets_cc0_p (PATTERN (cc0_setter)) > 0))
11792#endif
11793 )
38d8473f
RK
11794 {
11795 /* Move the notes and links of TEM elsewhere.
11796 This might delete other dead insns recursively.
11797 First set the pattern to something that won't use
11798 any register. */
11799
11800 PATTERN (tem) = pc_rtx;
11801
11802 distribute_notes (REG_NOTES (tem), tem, tem,
11803 NULL_RTX, NULL_RTX, NULL_RTX);
11804 distribute_links (LOG_LINKS (tem));
11805
11806 PUT_CODE (tem, NOTE);
11807 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
11808 NOTE_SOURCE_FILE (tem) = 0;
f5c97640
RH
11809
11810#ifdef HAVE_cc0
11811 /* Delete the setter too. */
11812 if (cc0_setter)
11813 {
11814 PATTERN (cc0_setter) = pc_rtx;
11815
11816 distribute_notes (REG_NOTES (cc0_setter),
11817 cc0_setter, cc0_setter,
11818 NULL_RTX, NULL_RTX, NULL_RTX);
11819 distribute_links (LOG_LINKS (cc0_setter));
11820
11821 PUT_CODE (cc0_setter, NOTE);
d3a923ee
RH
11822 NOTE_LINE_NUMBER (cc0_setter)
11823 = NOTE_INSN_DELETED;
f5c97640
RH
11824 NOTE_SOURCE_FILE (cc0_setter) = 0;
11825 }
11826#endif
38d8473f 11827 }
e5e809f4
JL
11828 /* If the register is both set and used here, put the
11829 REG_DEAD note here, but place a REG_UNUSED note
11830 here too unless there already is one. */
11831 else if (reg_referenced_p (XEXP (note, 0),
11832 PATTERN (tem)))
11833 {
11834 place = tem;
11835
11836 if (! find_regno_note (tem, REG_UNUSED,
11837 REGNO (XEXP (note, 0))))
11838 REG_NOTES (tem)
c5c76735 11839 = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
9e6a5703 11840 REG_NOTES (tem));
e5e809f4 11841 }
38d8473f
RK
11842 else
11843 {
11844 PUT_REG_NOTE_KIND (note, REG_UNUSED);
11845
11846 /* If there isn't already a REG_UNUSED note, put one
11847 here. */
11848 if (! find_regno_note (tem, REG_UNUSED,
11849 REGNO (XEXP (note, 0))))
11850 place = tem;
11851 break;
d3a923ee
RH
11852 }
11853 }
11854 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
11855 || (GET_CODE (tem) == CALL_INSN
11856 && find_reg_fusage (tem, USE, XEXP (note, 0))))
11857 {
11858 place = tem;
11859
11860 /* If we are doing a 3->2 combination, and we have a
11861 register which formerly died in i3 and was not used
11862 by i2, which now no longer dies in i3 and is used in
11863 i2 but does not die in i2, and place is between i2
11864 and i3, then we may need to move a link from place to
11865 i2. */
11866 if (i2 && INSN_UID (place) <= max_uid_cuid
11867 && INSN_CUID (place) > INSN_CUID (i2)
11868 && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
11869 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11870 {
11871 rtx links = LOG_LINKS (place);
11872 LOG_LINKS (place) = 0;
11873 distribute_links (links);
11874 }
11875 break;
11876 }
11877
11878 if (tem == bb->head)
230d793d 11879 break;
38d8473f
RK
11880 }
11881
d3a923ee
RH
11882 /* We haven't found an insn for the death note and it
11883 is still a REG_DEAD note, but we have hit the beginning
11884 of the block. If the existing life info says the reg
715e7fbc
RH
11885 was dead, there's nothing left to do. Otherwise, we'll
11886 need to do a global life update after combine. */
d3a923ee 11887 if (REG_NOTE_KIND (note) == REG_DEAD && place == 0)
e2cce0cf 11888 {
d3a923ee 11889 int regno = REGNO (XEXP (note, 0));
d3a923ee
RH
11890 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
11891 {
715e7fbc
RH
11892 SET_BIT (refresh_blocks, this_basic_block);
11893 need_refresh = 1;
d3a923ee 11894 }
e2cce0cf 11895 }
38d8473f 11896 }
230d793d
RS
11897
11898 /* If the register is set or already dead at PLACE, we needn't do
e5e809f4
JL
11899 anything with this note if it is still a REG_DEAD note.
11900 We can here if it is set at all, not if is it totally replace,
11901 which is what `dead_or_set_p' checks, so also check for it being
11902 set partially. */
11903
230d793d
RS
11904 if (place && REG_NOTE_KIND (note) == REG_DEAD)
11905 {
11906 int regno = REGNO (XEXP (note, 0));
11907
11908 if (dead_or_set_p (place, XEXP (note, 0))
11909 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
11910 {
11911 /* Unless the register previously died in PLACE, clear
11912 reg_last_death. [I no longer understand why this is
11913 being done.] */
11914 if (reg_last_death[regno] != place)
11915 reg_last_death[regno] = 0;
11916 place = 0;
11917 }
11918 else
11919 reg_last_death[regno] = place;
11920
11921 /* If this is a death note for a hard reg that is occupying
11922 multiple registers, ensure that we are still using all
11923 parts of the object. If we find a piece of the object
11924 that is unused, we must add a USE for that piece before
11925 PLACE and put the appropriate REG_DEAD note on it.
11926
11927 An alternative would be to put a REG_UNUSED for the pieces
11928 on the insn that set the register, but that can't be done if
11929 it is not in the same block. It is simpler, though less
11930 efficient, to add the USE insns. */
11931
11932 if (place && regno < FIRST_PSEUDO_REGISTER
11933 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
11934 {
11935 int endregno
11936 = regno + HARD_REGNO_NREGS (regno,
11937 GET_MODE (XEXP (note, 0)));
11938 int all_used = 1;
11939 int i;
11940
11941 for (i = regno; i < endregno; i++)
9fd5bb62
JW
11942 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
11943 && ! find_regno_fusage (place, USE, i))
230d793d 11944 {
38a448ca 11945 rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
28f6d3af
RK
11946 rtx p;
11947
11948 /* See if we already placed a USE note for this
11949 register in front of PLACE. */
11950 for (p = place;
11951 GET_CODE (PREV_INSN (p)) == INSN
11952 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
11953 p = PREV_INSN (p))
11954 if (rtx_equal_p (piece,
11955 XEXP (PATTERN (PREV_INSN (p)), 0)))
11956 {
11957 p = 0;
11958 break;
11959 }
11960
11961 if (p)
11962 {
11963 rtx use_insn
38a448ca
RH
11964 = emit_insn_before (gen_rtx_USE (VOIDmode,
11965 piece),
28f6d3af
RK
11966 p);
11967 REG_NOTES (use_insn)
38a448ca
RH
11968 = gen_rtx_EXPR_LIST (REG_DEAD, piece,
11969 REG_NOTES (use_insn));
28f6d3af 11970 }
230d793d 11971
5089e22e 11972 all_used = 0;
230d793d
RS
11973 }
11974
a394b17b
JW
11975 /* Check for the case where the register dying partially
11976 overlaps the register set by this insn. */
11977 if (all_used)
11978 for (i = regno; i < endregno; i++)
11979 if (dead_or_set_regno_p (place, i))
11980 {
11981 all_used = 0;
11982 break;
11983 }
11984
230d793d
RS
                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         still used and that are not already dead or set.  */

                      for (i = regno; i < endregno; i++)
                        {
                          rtx piece = gen_rtx_REG (reg_raw_mode[i], i);

                          if ((reg_referenced_p (piece, PATTERN (place))
                               || (GET_CODE (place) == CALL_INSN
                                   && find_reg_fusage (place, USE, piece)))
                              && ! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            REG_NOTES (place)
                              = gen_rtx_EXPR_LIST (REG_DEAD, piece,
                                                   REG_NOTES (place));
                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          abort ();
        }

      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }
      else if ((REG_NOTE_KIND (note) == REG_DEAD
                || REG_NOTE_KIND (note) == REG_UNUSED)
               && GET_CODE (XEXP (note, 0)) == REG)
        REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
        {
          if ((REG_NOTE_KIND (note) == REG_DEAD
               || REG_NOTE_KIND (note) == REG_UNUSED)
              && GET_CODE (XEXP (note, 0)) == REG)
            REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

          REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
                                               REG_NOTE_KIND (note),
                                               XEXP (note, 0),
                                               REG_NOTES (place2));
        }
    }
}
\f
/* Similarly to distribute_notes above, distribute the LOG_LINKS that used
   to be present on I3, I2, and I1 to new locations.  This is also called
   in one case to add a link pointing at I3 when I3's destination is
   changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link
         to have been changed by combine.  The only way that could happen
         is if we replaced I3, I2, and I1 by I3 and I2; but in that case
         the destination of I2 also remains unchanged.  */

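      /* For instance (an illustrative case), a link whose target insn is
         a PARALLEL of two independent SETs makes single_set return 0;
         since we cannot tell which destination the link described, the
         link is dropped here.  */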
      if (GET_CODE (XEXP (link, 0)) == NOTE
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == SIGN_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */

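      /* Illustrative example with hypothetical insn numbers: if insn 10
         is the target of LINK and sets (reg 100), and insn 14 is the
         first later insn in this block whose pattern mentions (reg 100),
         the loop below stops at insn 14 and LINK is placed there.  */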
      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block == n_basic_blocks - 1
                     || BLOCK_HEAD (this_basic_block + 1) != insn));
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
            && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (GET_CODE (insn) == CALL_INSN
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || INSN_CUID (added_links_insn) > INSN_CUID (place))
                added_links_insn = place;
            }
        }
    }
}
\f
/* Compute INSN_CUID for INSN, which is an insn made by combine.  */

static int
insn_cuid (insn)
     rtx insn;
{
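  /* Insns added by combine, such as the USE insns emitted by
     distribute_notes, have UIDs above max_uid_cuid and so have no cuid
     of their own; step forward past any such USE insns to a following
     insn that does have one.  */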
  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
         && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
    insn = NEXT_INSN (insn);

  if (INSN_UID (insn) > max_uid_cuid)
    abort ();

  return INSN_CUID (insn);
}
\f
void
dump_combine_stats (file)
     FILE *file;
{
  fnotice
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fnotice
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}