]> gcc.gnu.org Git - gcc.git/blame - gcc/combine.c
Daily bump.
[gcc.git] / gcc / combine.c
CommitLineData
/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c is not completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
76
230d793d 77#include "config.h"
670ee920 78#include "system.h"
c5c76735 79#include "rtl.h"
a091679a 80#include "tm_p.h"
230d793d
RS
81#include "flags.h"
82#include "regs.h"
55310dad 83#include "hard-reg-set.h"
230d793d
RS
84#include "basic-block.h"
85#include "insn-config.h"
49ad7cfa 86#include "function.h"
d6f4ec51
KG
87/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
88#include "expr.h"
230d793d
RS
89#include "insn-attr.h"
90#include "recog.h"
91#include "real.h"
2e107e9e 92#include "toplev.h"
f73ad30e 93
230d793d
RS
/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  Falls back to insn_cuid for insns created
   after the uid_cuid table was built.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT)(val) << (BITS_PER_WORD - 1)) << 1)

/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is the earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;
static int need_refresh;

/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if a
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static unsigned char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bits copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; unsigned int i;} old_contents;
  union {rtx *r; unsigned int *i;} where;
};

/* Record the changes made so far: `undos' is the list of pending undo
   records and `frees' is a free list of records available for reuse.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST			PARAMS ((rtx *, rtx));
static void do_SUBST_INT		PARAMS ((unsigned int *,
						 unsigned int));
static void init_reg_last_arrays	PARAMS ((void));
static void setup_incoming_promotions   PARAMS ((void));
static void set_nonzero_bits_and_sign_copies  PARAMS ((rtx, rtx, void *));
static int cant_combine_insn_p	PARAMS ((rtx));
static int can_combine_p	PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p	PARAMS ((rtx));
static int combinable_i3pat	PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
static int contains_muldiv	PARAMS ((rtx));
static rtx try_combine		PARAMS ((rtx, rtx, rtx, int *));
static void undo_all		PARAMS ((void));
static void undo_commit		PARAMS ((void));
static rtx *find_split_point	PARAMS ((rtx *, rtx));
static rtx subst		PARAMS ((rtx, rtx, rtx, int, int));
static rtx combine_simplify_rtx	PARAMS ((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else  PARAMS ((rtx));
static rtx simplify_set		PARAMS ((rtx));
static rtx simplify_logical	PARAMS ((rtx, int));
static rtx expand_compound_operation  PARAMS ((rtx));
static rtx expand_field_assignment  PARAMS ((rtx));
static rtx make_extraction	PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
					 rtx, unsigned HOST_WIDE_INT, int,
					 int, int));
static rtx extract_left_shift	PARAMS ((rtx, int));
static rtx make_compound_operation  PARAMS ((rtx, enum rtx_code));
static int get_pos_from_mask	PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned HOST_WIDE_INT *));
static rtx force_to_mode	PARAMS ((rtx, enum machine_mode,
					 unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond	PARAMS ((rtx, rtx *, rtx *));
static rtx known_cond		PARAMS ((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
static rtx make_field_assignment  PARAMS ((rtx));
static rtx apply_distributive_law  PARAMS ((rtx));
static rtx simplify_and_const_int  PARAMS ((rtx, enum machine_mode, rtx,
					    unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PARAMS ((rtx, enum machine_mode));
static unsigned int num_sign_bit_copies  PARAMS ((rtx, enum machine_mode));
static int merge_outer_ops	PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
					 enum rtx_code, HOST_WIDE_INT,
					 enum machine_mode, int *));
static rtx simplify_shift_const	PARAMS ((rtx, enum rtx_code, enum machine_mode,
					 rtx, int));
static int recog_for_combine	PARAMS ((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PARAMS ((enum machine_mode, rtx));
static rtx gen_binary		PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx));
static enum rtx_code simplify_comparison  PARAMS ((enum rtx_code, rtx *, rtx *));
static void update_table_tick	PARAMS ((rtx));
static void record_value_for_reg  PARAMS ((rtx, rtx, rtx));
static void check_promoted_subreg PARAMS ((rtx, rtx));
static void record_dead_and_set_regs_1  PARAMS ((rtx, rtx, void *));
static void record_dead_and_set_regs  PARAMS ((rtx));
static int get_last_value_validate  PARAMS ((rtx *, rtx, int, int));
static rtx get_last_value	PARAMS ((rtx));
static int use_crosses_set_p	PARAMS ((rtx, int));
static void reg_dead_at_p_1	PARAMS ((rtx, rtx, void *));
static int reg_dead_at_p	PARAMS ((rtx, rtx));
static void move_deaths		PARAMS ((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PARAMS ((rtx, rtx));
static void distribute_notes	PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PARAMS ((rtx));
static void mark_used_regs_combine PARAMS ((rtx));
static int insn_cuid		PARAMS ((rtx));
static void record_promoted_value PARAMS ((rtx, rtx));
static rtx reversed_comparison  PARAMS ((rtx, enum machine_mode, rtx, rtx));
static enum rtx_code combine_reversed_comparison_code PARAMS ((rtx));
230d793d 410\f
76095e2f
RH
411/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
412 insn. The substitution can be undone by undo_all. If INTO is already
413 set to NEWVAL, do not record this change. Because computing NEWVAL might
414 also call SUBST, we have to compute it before we put anything into
415 the undo table. */
416
417static void
663522cb 418do_SUBST (into, newval)
76095e2f
RH
419 rtx *into, newval;
420{
421 struct undo *buf;
422 rtx oldval = *into;
423
424 if (oldval == newval)
425 return;
426
427 if (undobuf.frees)
428 buf = undobuf.frees, undobuf.frees = buf->next;
429 else
430 buf = (struct undo *) xmalloc (sizeof (struct undo));
431
432 buf->is_int = 0;
433 buf->where.r = into;
434 buf->old_contents.r = oldval;
435 *into = newval;
436
437 buf->next = undobuf.undos, undobuf.undos = buf;
438}
439
440#define SUBST(INTO, NEWVAL) do_SUBST(&(INTO), (NEWVAL))
441
442/* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
443 for the value of a HOST_WIDE_INT value (including CONST_INT) is
444 not safe. */
445
446static void
663522cb 447do_SUBST_INT (into, newval)
0345195a 448 unsigned int *into, newval;
76095e2f
RH
449{
450 struct undo *buf;
0345195a 451 unsigned int oldval = *into;
76095e2f
RH
452
453 if (oldval == newval)
454 return;
455
456 if (undobuf.frees)
457 buf = undobuf.frees, undobuf.frees = buf->next;
458 else
459 buf = (struct undo *) xmalloc (sizeof (struct undo));
460
461 buf->is_int = 1;
462 buf->where.i = into;
463 buf->old_contents.i = oldval;
464 *into = newval;
465
466 buf->next = undobuf.undos, undobuf.undos = buf;
467}
468
469#define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL))
470\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return non-zero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (f, nregs)
     rtx f;
     unsigned int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  /* Allocate the per-register tables; reg_nonzero_bits and
     reg_sign_bit_copies are zeroed by xcalloc, the reg_last_* tables
     are cleared by init_reg_last_arrays below.  */
  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
  reg_sign_bit_copies
    = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (refresh_blocks);
  need_refresh = 0;

  /* First pass: assign cuids and collect nonzero-bits / sign-copy
     information for every register.  */
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  /* REG_INC notes modify registers without an explicit SET;
	     treat each as a clobber of the incremented register.  */
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  /* Second pass: attempt the combinations.  When a combination succeeds,
     try_combine returns the insn at which to resume scanning (via NEXT).  */
  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && BLOCK_HEAD (this_basic_block + 1) == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (INSN_P (insn))
	{
	  /* See if we know about function return values before this
	     insn based upon SUBREG flags.  */
	  check_promoted_subreg (insn, PATTERN (insn));

	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0),
				     NULL_RTX, &new_direct_jump_p)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    {
	      rtx link = XEXP (links, 0);

	      /* If the linked insn has been replaced by a note, then there
		 is no point in pursuing this chain any further.  */
	      if (GET_CODE (link) == NOTE)
		break;

	      for (nextlinks = LOG_LINKS (link);
		   nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0),
					prev, &new_direct_jump_p)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0),
				       &new_direct_jump_p)) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  /* Re-do global life analysis for any blocks where a register died
     at entry as a result of our changes.  */
  if (need_refresh)
    {
      compute_bb_for_insn (get_max_uid ());
      update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
			PROP_DEATH_NOTES);
    }

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  /* Release the free list of undo records.  */
  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}
ef026f91
RS
760
761/* Wipe the reg_last_xxx arrays in preparation for another pass. */
762
763static void
764init_reg_last_arrays ()
765{
770ae6cc 766 unsigned int nregs = combine_max_regno;
ef026f91 767
961192e1
JM
768 memset ((char *) reg_last_death, 0, nregs * sizeof (rtx));
769 memset ((char *) reg_last_set, 0, nregs * sizeof (rtx));
770 memset ((char *) reg_last_set_value, 0, nregs * sizeof (rtx));
771 memset ((char *) reg_last_set_table_tick, 0, nregs * sizeof (int));
772 memset ((char *) reg_last_set_label, 0, nregs * sizeof (int));
773 memset (reg_last_set_invalid, 0, nregs * sizeof (char));
774 memset ((char *) reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
775 memset ((char *) reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
776 memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
ef026f91 777}
230d793d 778\f
/* Set up any promoted values for incoming argument registers.  For each
   register that carries a promoted argument, record its value as an
   extension of an unknown quantity in the argument's original mode.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      /* Check whether this register can hold an incoming pointer
	 argument.  FUNCTION_ARG_REGNO_P tests outgoing register
	 numbers, so translate if necessary due to register windows.  */
      if (! FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno)))
	continue;

      reg = promoted_input_arg (regno, &mode, &unsignedp);
      if (reg == 0)
	continue;

      /* The CLOBBER in mode MODE stands for the unknown incoming value;
	 the extension code records how its upper bits were promoted.  */
      record_value_for_reg
	(reg, first, gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				    GET_MODE (reg),
				    gen_rtx_CLOBBER (mode, const0_rtx)));
    }
#endif
}
809\f
91102d5a
RK
810/* Called via note_stores. If X is a pseudo that is narrower than
811 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
230d793d
RS
812
813 If we are setting only a portion of X and we can't figure out what
814 portion, assume all bits will be used since we don't know what will
d0ab8cd3
RK
815 be happening.
816
817 Similarly, set how many bits of X are known to be copies of the sign bit
663522cb 818 at all locations in the function. This is the smallest number implied
d0ab8cd3 819 by any set of X. */
230d793d
RS
820
821static void
84832317 822set_nonzero_bits_and_sign_copies (x, set, data)
230d793d
RS
823 rtx x;
824 rtx set;
84832317 825 void *data ATTRIBUTE_UNUSED;
230d793d 826{
770ae6cc 827 unsigned int num;
d0ab8cd3 828
230d793d
RS
829 if (GET_CODE (x) == REG
830 && REGNO (x) >= FIRST_PSEUDO_REGISTER
e8095e80
RK
831 /* If this register is undefined at the start of the file, we can't
832 say what its contents were. */
e881bb1b 833 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
5f4f0e22 834 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
230d793d 835 {
2dab894a 836 if (set == 0 || GET_CODE (set) == CLOBBER)
e8095e80
RK
837 {
838 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
88306d12 839 reg_sign_bit_copies[REGNO (x)] = 1;
e8095e80
RK
840 return;
841 }
230d793d
RS
842
843 /* If this is a complex assignment, see if we can convert it into a
5089e22e 844 simple assignment. */
230d793d 845 set = expand_field_assignment (set);
d79f08e0
RK
846
847 /* If this is a simple assignment, or we have a paradoxical SUBREG,
848 set what we know about X. */
849
850 if (SET_DEST (set) == x
851 || (GET_CODE (SET_DEST (set)) == SUBREG
705c7b3b
JW
852 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
853 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
d79f08e0 854 && SUBREG_REG (SET_DEST (set)) == x))
d0ab8cd3 855 {
9afa3d54
RK
856 rtx src = SET_SRC (set);
857
858#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
859 /* If X is narrower than a word and SRC is a non-negative
860 constant that would appear negative in the mode of X,
861 sign-extend it for use in reg_nonzero_bits because some
862 machines (maybe most) will actually do the sign-extension
663522cb 863 and this is the conservative approach.
9afa3d54
RK
864
865 ??? For 2.5, try to tighten up the MD files in this regard
866 instead of this kludge. */
867
868 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
869 && GET_CODE (src) == CONST_INT
870 && INTVAL (src) > 0
871 && 0 != (INTVAL (src)
872 & ((HOST_WIDE_INT) 1
9e69be8c 873 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
874 src = GEN_INT (INTVAL (src)
875 | ((HOST_WIDE_INT) (-1)
876 << GET_MODE_BITSIZE (GET_MODE (x))));
877#endif
878
951553af 879 reg_nonzero_bits[REGNO (x)]
9afa3d54 880 |= nonzero_bits (src, nonzero_bits_mode);
d0ab8cd3
RK
881 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
882 if (reg_sign_bit_copies[REGNO (x)] == 0
883 || reg_sign_bit_copies[REGNO (x)] > num)
884 reg_sign_bit_copies[REGNO (x)] = num;
885 }
230d793d 886 else
d0ab8cd3 887 {
951553af 888 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
88306d12 889 reg_sign_bit_copies[REGNO (x)] = 1;
d0ab8cd3 890 }
230d793d
RS
891 }
892}
893\f
894/* See if INSN can be combined into I3. PRED and SUCC are optionally
895 insns that were previously combined into I3 or that will be combined
896 into the merger of INSN and I3.
897
898 Return 0 if the combination is not allowed for any reason.
899
663522cb 900 If the combination is allowed, *PDEST will be set to the single
230d793d
RS
901 destination of INSN and *PSRC to the single source, and this function
902 will return 1. */
903
904static int
905can_combine_p (insn, i3, pred, succ, pdest, psrc)
906 rtx insn;
907 rtx i3;
e51712db
KG
908 rtx pred ATTRIBUTE_UNUSED;
909 rtx succ;
230d793d
RS
910 rtx *pdest, *psrc;
911{
912 int i;
913 rtx set = 0, src, dest;
b729186a
JL
914 rtx p;
915#ifdef AUTO_INC_DEC
76d31c63 916 rtx link;
b729186a 917#endif
230d793d
RS
918 int all_adjacent = (succ ? (next_active_insn (insn) == succ
919 && next_active_insn (succ) == i3)
920 : next_active_insn (insn) == i3);
921
922 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
663522cb 923 or a PARALLEL consisting of such a SET and CLOBBERs.
230d793d
RS
924
925 If INSN has CLOBBER parallel parts, ignore them for our processing.
926 By definition, these happen during the execution of the insn. When it
927 is merged with another insn, all bets are off. If they are, in fact,
928 needed and aren't also supplied in I3, they may be added by
663522cb 929 recog_for_combine. Otherwise, it won't match.
230d793d
RS
930
931 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
932 note.
933
663522cb 934 Get the source and destination of INSN. If more than one, can't
230d793d 935 combine. */
663522cb 936
230d793d
RS
937 if (GET_CODE (PATTERN (insn)) == SET)
938 set = PATTERN (insn);
939 else if (GET_CODE (PATTERN (insn)) == PARALLEL
940 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
941 {
942 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
943 {
944 rtx elt = XVECEXP (PATTERN (insn), 0, i);
945
946 switch (GET_CODE (elt))
947 {
e3258cef
R
948 /* This is important to combine floating point insns
949 for the SH4 port. */
950 case USE:
951 /* Combining an isolated USE doesn't make sense.
952 We depend here on combinable_i3_pat to reject them. */
953 /* The code below this loop only verifies that the inputs of
954 the SET in INSN do not change. We call reg_set_between_p
955 to verify that the REG in the USE does not change betweeen
956 I3 and INSN.
957 If the USE in INSN was for a pseudo register, the matching
958 insn pattern will likely match any register; combining this
959 with any other USE would only be safe if we knew that the
960 used registers have identical values, or if there was
961 something to tell them apart, e.g. different modes. For
962 now, we forgo such compilcated tests and simply disallow
963 combining of USES of pseudo registers with any other USE. */
964 if (GET_CODE (XEXP (elt, 0)) == REG
965 && GET_CODE (PATTERN (i3)) == PARALLEL)
966 {
967 rtx i3pat = PATTERN (i3);
968 int i = XVECLEN (i3pat, 0) - 1;
770ae6cc
RK
969 unsigned int regno = REGNO (XEXP (elt, 0));
970
e3258cef
R
971 do
972 {
973 rtx i3elt = XVECEXP (i3pat, 0, i);
770ae6cc 974
e3258cef
R
975 if (GET_CODE (i3elt) == USE
976 && GET_CODE (XEXP (i3elt, 0)) == REG
977 && (REGNO (XEXP (i3elt, 0)) == regno
978 ? reg_set_between_p (XEXP (elt, 0),
979 PREV_INSN (insn), i3)
980 : regno >= FIRST_PSEUDO_REGISTER))
981 return 0;
982 }
983 while (--i >= 0);
984 }
985 break;
986
230d793d
RS
987 /* We can ignore CLOBBERs. */
988 case CLOBBER:
989 break;
990
991 case SET:
992 /* Ignore SETs whose result isn't used but not those that
993 have side-effects. */
994 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
995 && ! side_effects_p (elt))
996 break;
997
998 /* If we have already found a SET, this is a second one and
999 so we cannot combine with this insn. */
1000 if (set)
1001 return 0;
1002
1003 set = elt;
1004 break;
1005
1006 default:
1007 /* Anything else means we can't combine. */
1008 return 0;
1009 }
1010 }
1011
1012 if (set == 0
1013 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1014 so don't do anything with it. */
1015 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
1016 return 0;
1017 }
1018 else
1019 return 0;
1020
1021 if (set == 0)
1022 return 0;
1023
1024 set = expand_field_assignment (set);
1025 src = SET_SRC (set), dest = SET_DEST (set);
1026
1027 /* Don't eliminate a store in the stack pointer. */
1028 if (dest == stack_pointer_rtx
230d793d
RS
1029 /* If we couldn't eliminate a field assignment, we can't combine. */
1030 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
1031 /* Don't combine with an insn that sets a register to itself if it has
1032 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
5f4f0e22 1033 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
62f7f1f5
GK
1034 /* Can't merge an ASM_OPERANDS. */
1035 || GET_CODE (src) == ASM_OPERANDS
230d793d
RS
1036 /* Can't merge a function call. */
1037 || GET_CODE (src) == CALL
cd5e8f1f 1038 /* Don't eliminate a function call argument. */
4dca5ec5
RK
1039 || (GET_CODE (i3) == CALL_INSN
1040 && (find_reg_fusage (i3, USE, dest)
1041 || (GET_CODE (dest) == REG
1042 && REGNO (dest) < FIRST_PSEUDO_REGISTER
1043 && global_regs[REGNO (dest)])))
230d793d
RS
1044 /* Don't substitute into an incremented register. */
1045 || FIND_REG_INC_NOTE (i3, dest)
1046 || (succ && FIND_REG_INC_NOTE (succ, dest))
ec35104c 1047#if 0
230d793d 1048 /* Don't combine the end of a libcall into anything. */
ec35104c
JL
1049 /* ??? This gives worse code, and appears to be unnecessary, since no
1050 pass after flow uses REG_LIBCALL/REG_RETVAL notes. Local-alloc does
1051 use REG_RETVAL notes for noconflict blocks, but other code here
1052 makes sure that those insns don't disappear. */
5f4f0e22 1053 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
ec35104c 1054#endif
230d793d
RS
1055 /* Make sure that DEST is not used after SUCC but before I3. */
1056 || (succ && ! all_adjacent
1057 && reg_used_between_p (dest, succ, i3))
1058 /* Make sure that the value that is to be substituted for the register
1059 does not use any registers whose values alter in between. However,
1060 If the insns are adjacent, a use can't cross a set even though we
1061 think it might (this can happen for a sequence of insns each setting
1062 the same destination; reg_last_set of that register might point to
d81481d3
RK
1063 a NOTE). If INSN has a REG_EQUIV note, the register is always
1064 equivalent to the memory so the substitution is valid even if there
1065 are intervening stores. Also, don't move a volatile asm or
1066 UNSPEC_VOLATILE across any other insns. */
230d793d 1067 || (! all_adjacent
d81481d3
RK
1068 && (((GET_CODE (src) != MEM
1069 || ! find_reg_note (insn, REG_EQUIV, src))
1070 && use_crosses_set_p (src, INSN_CUID (insn)))
a66a10c7
RS
1071 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
1072 || GET_CODE (src) == UNSPEC_VOLATILE))
230d793d
RS
1073 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
1074 better register allocation by not doing the combine. */
1075 || find_reg_note (i3, REG_NO_CONFLICT, dest)
1076 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
1077 /* Don't combine across a CALL_INSN, because that would possibly
1078 change whether the life span of some REGs crosses calls or not,
1079 and it is a pain to update that information.
1080 Exception: if source is a constant, moving it later can't hurt.
1081 Accept that special case, because it helps -fforce-addr a lot. */
1082 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
1083 return 0;
1084
1085 /* DEST must either be a REG or CC0. */
1086 if (GET_CODE (dest) == REG)
1087 {
1088 /* If register alignment is being enforced for multi-word items in all
1089 cases except for parameters, it is possible to have a register copy
1090 insn referencing a hard register that is not allowed to contain the
1091 mode being copied and which would not be valid as an operand of most
1092 insns. Eliminate this problem by not combining with such an insn.
1093
1094 Also, on some machines we don't want to extend the life of a hard
53895717 1095 register. */
230d793d
RS
1096
1097 if (GET_CODE (src) == REG
1098 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1099 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
c448a43e
RK
1100 /* Don't extend the life of a hard register unless it is
1101 user variable (if we have few registers) or it can't
1102 fit into the desired register (meaning something special
ecd40809
RK
1103 is going on).
1104 Also avoid substituting a return register into I3, because
1105 reload can't handle a conflict with constraints of other
1106 inputs. */
230d793d 1107 || (REGNO (src) < FIRST_PSEUDO_REGISTER
53895717 1108 && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
230d793d
RS
1109 return 0;
1110 }
1111 else if (GET_CODE (dest) != CC0)
1112 return 0;
1113
5f96750d
RS
1114 /* Don't substitute for a register intended as a clobberable operand.
1115 Similarly, don't substitute an expression containing a register that
1116 will be clobbered in I3. */
230d793d
RS
1117 if (GET_CODE (PATTERN (i3)) == PARALLEL)
1118 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1119 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
5f96750d
RS
1120 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
1121 src)
1122 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
230d793d
RS
1123 return 0;
1124
1125 /* If INSN contains anything volatile, or is an `asm' (whether volatile
d276f2bb 1126 or not), reject, unless nothing volatile comes between it and I3 */
230d793d
RS
1127
1128 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
d276f2bb
CM
1129 {
1130 /* Make sure succ doesn't contain a volatile reference. */
1131 if (succ != 0 && volatile_refs_p (PATTERN (succ)))
1132 return 0;
663522cb 1133
d276f2bb 1134 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
2c3c49de 1135 if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
d276f2bb
CM
1136 return 0;
1137 }
230d793d 1138
b79ee7eb
RH
1139 /* If INSN is an asm, and DEST is a hard register, reject, since it has
1140 to be an explicit register variable, and was chosen for a reason. */
1141
1142 if (GET_CODE (src) == ASM_OPERANDS
1143 && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1144 return 0;
1145
4b2cb4a2
RS
1146 /* If there are any volatile insns between INSN and I3, reject, because
1147 they might affect machine state. */
1148
1149 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
2c3c49de 1150 if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
4b2cb4a2
RS
1151 return 0;
1152
230d793d
RS
1153 /* If INSN or I2 contains an autoincrement or autodecrement,
1154 make sure that register is not used between there and I3,
1155 and not already used in I3 either.
1156 Also insist that I3 not be a jump; if it were one
1157 and the incremented register were spilled, we would lose. */
1158
1159#ifdef AUTO_INC_DEC
1160 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1161 if (REG_NOTE_KIND (link) == REG_INC
1162 && (GET_CODE (i3) == JUMP_INSN
1163 || reg_used_between_p (XEXP (link, 0), insn, i3)
1164 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1165 return 0;
1166#endif
1167
1168#ifdef HAVE_cc0
1169 /* Don't combine an insn that follows a CC0-setting insn.
1170 An insn that uses CC0 must not be separated from the one that sets it.
1171 We do, however, allow I2 to follow a CC0-setting insn if that insn
1172 is passed as I1; in that case it will be deleted also.
1173 We also allow combining in this case if all the insns are adjacent
1174 because that would leave the two CC0 insns adjacent as well.
1175 It would be more logical to test whether CC0 occurs inside I1 or I2,
1176 but that would be much slower, and this ought to be equivalent. */
1177
1178 p = prev_nonnote_insn (insn);
1179 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
1180 && ! all_adjacent)
1181 return 0;
1182#endif
1183
1184 /* If we get here, we have passed all the tests and the combination is
1185 to be allowed. */
1186
1187 *pdest = dest;
1188 *psrc = src;
1189
1190 return 1;
1191}
1192\f
956d6950
JL
1193/* Check if PAT is an insn - or a part of it - used to set up an
1194 argument for a function in a hard register. */
1195
1196static int
1197sets_function_arg_p (pat)
1198 rtx pat;
1199{
1200 int i;
1201 rtx inner_dest;
1202
1203 switch (GET_CODE (pat))
1204 {
1205 case INSN:
1206 return sets_function_arg_p (PATTERN (pat));
1207
1208 case PARALLEL:
1209 for (i = XVECLEN (pat, 0); --i >= 0;)
1210 if (sets_function_arg_p (XVECEXP (pat, 0, i)))
1211 return 1;
1212
1213 break;
1214
1215 case SET:
1216 inner_dest = SET_DEST (pat);
1217 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1218 || GET_CODE (inner_dest) == SUBREG
1219 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1220 inner_dest = XEXP (inner_dest, 0);
1221
1222 return (GET_CODE (inner_dest) == REG
1223 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1224 && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));
1d300e19
KG
1225
1226 default:
1227 break;
956d6950
JL
1228 }
1229
1230 return 0;
1231}
1232
230d793d
RS
1233/* LOC is the location within I3 that contains its pattern or the component
1234 of a PARALLEL of the pattern. We validate that it is valid for combining.
1235
1236 One problem is if I3 modifies its output, as opposed to replacing it
1237 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1238 so would produce an insn that is not equivalent to the original insns.
1239
1240 Consider:
1241
1242 (set (reg:DI 101) (reg:DI 100))
1243 (set (subreg:SI (reg:DI 101) 0) <foo>)
1244
1245 This is NOT equivalent to:
1246
1247 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1248 (set (reg:DI 101) (reg:DI 100))])
1249
1250 Not only does this modify 100 (in which case it might still be valid
663522cb 1251 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
230d793d
RS
1252
1253 We can also run into a problem if I2 sets a register that I1
1254 uses and I1 gets directly substituted into I3 (not via I2). In that
1255 case, we would be getting the wrong value of I2DEST into I3, so we
1256 must reject the combination. This case occurs when I2 and I1 both
1257 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1258 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1259 of a SET must prevent combination from occurring.
1260
230d793d
RS
1261 Before doing the above check, we first try to expand a field assignment
1262 into a set of logical operations.
1263
1264 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1265 we place a register that is both set and used within I3. If more than one
1266 such register is detected, we fail.
1267
1268 Return 1 if the combination is valid, zero otherwise. */
1269
1270static int
1271combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1272 rtx i3;
1273 rtx *loc;
1274 rtx i2dest;
1275 rtx i1dest;
1276 int i1_not_in_src;
1277 rtx *pi3dest_killed;
1278{
1279 rtx x = *loc;
1280
1281 if (GET_CODE (x) == SET)
1282 {
1283 rtx set = expand_field_assignment (x);
1284 rtx dest = SET_DEST (set);
1285 rtx src = SET_SRC (set);
29a82058 1286 rtx inner_dest = dest;
663522cb 1287
29a82058
JL
1288#if 0
1289 rtx inner_src = src;
1290#endif
230d793d
RS
1291
1292 SUBST (*loc, set);
1293
1294 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1295 || GET_CODE (inner_dest) == SUBREG
1296 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1297 inner_dest = XEXP (inner_dest, 0);
1298
1299 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1300 was added. */
1301#if 0
1302 while (GET_CODE (inner_src) == STRICT_LOW_PART
1303 || GET_CODE (inner_src) == SUBREG
1304 || GET_CODE (inner_src) == ZERO_EXTRACT)
1305 inner_src = XEXP (inner_src, 0);
1306
1307 /* If it is better that two different modes keep two different pseudos,
1308 avoid combining them. This avoids producing the following pattern
1309 on a 386:
1310 (set (subreg:SI (reg/v:QI 21) 0)
1311 (lshiftrt:SI (reg/v:SI 20)
1312 (const_int 24)))
1313 If that were made, reload could not handle the pair of
1314 reg 20/21, since it would try to get any GENERAL_REGS
1315 but some of them don't handle QImode. */
1316
1317 if (rtx_equal_p (inner_src, i2dest)
1318 && GET_CODE (inner_dest) == REG
1319 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1320 return 0;
1321#endif
1322
1323 /* Check for the case where I3 modifies its output, as
1324 discussed above. */
1325 if ((inner_dest != dest
1326 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1327 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
956d6950 1328
53895717
BS
1329 /* This is the same test done in can_combine_p except we can't test
1330 all_adjacent; we don't have to, since this instruction will stay
1331 in place, thus we are not considering increasing the lifetime of
1332 INNER_DEST.
956d6950
JL
1333
1334 Also, if this insn sets a function argument, combining it with
1335 something that might need a spill could clobber a previous
1336 function argument; the all_adjacent test in can_combine_p also
1337 checks this; here, we do a more specific test for this case. */
663522cb 1338
230d793d 1339 || (GET_CODE (inner_dest) == REG
dfbe1b2f 1340 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
c448a43e 1341 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
53895717 1342 GET_MODE (inner_dest))))
230d793d
RS
1343 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1344 return 0;
1345
1346 /* If DEST is used in I3, it is being killed in this insn,
663522cb 1347 so record that for later.
36a9c2e9
JL
1348 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1349 STACK_POINTER_REGNUM, since these are always considered to be
1350 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
230d793d 1351 if (pi3dest_killed && GET_CODE (dest) == REG
36a9c2e9
JL
1352 && reg_referenced_p (dest, PATTERN (i3))
1353 && REGNO (dest) != FRAME_POINTER_REGNUM
6d7096b0
DE
1354#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1355 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1356#endif
36a9c2e9
JL
1357#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1358 && (REGNO (dest) != ARG_POINTER_REGNUM
1359 || ! fixed_regs [REGNO (dest)])
1360#endif
1361 && REGNO (dest) != STACK_POINTER_REGNUM)
230d793d
RS
1362 {
1363 if (*pi3dest_killed)
1364 return 0;
1365
1366 *pi3dest_killed = dest;
1367 }
1368 }
1369
1370 else if (GET_CODE (x) == PARALLEL)
1371 {
1372 int i;
1373
1374 for (i = 0; i < XVECLEN (x, 0); i++)
1375 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1376 i1_not_in_src, pi3dest_killed))
1377 return 0;
1378 }
1379
1380 return 1;
1381}
1382\f
14a774a9
RK
1383/* Return 1 if X is an arithmetic expression that contains a multiplication
1384 and division. We don't count multiplications by powers of two here. */
1385
1386static int
1387contains_muldiv (x)
1388 rtx x;
1389{
1390 switch (GET_CODE (x))
1391 {
1392 case MOD: case DIV: case UMOD: case UDIV:
1393 return 1;
1394
1395 case MULT:
1396 return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
1397 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
1398 default:
1399 switch (GET_RTX_CLASS (GET_CODE (x)))
1400 {
1401 case 'c': case '<': case '2':
1402 return contains_muldiv (XEXP (x, 0))
1403 || contains_muldiv (XEXP (x, 1));
1404
1405 case '1':
1406 return contains_muldiv (XEXP (x, 0));
1407
1408 default:
1409 return 0;
1410 }
1411 }
1412}
1413\f
c3410241
BS
1414/* Determine whether INSN can be used in a combination. Return nonzero if
1415 not. This is used in try_combine to detect early some cases where we
1416 can't perform combinations. */
1417
1418static int
1419cant_combine_insn_p (insn)
1420 rtx insn;
1421{
1422 rtx set;
1423 rtx src, dest;
1424
1425 /* If this isn't really an insn, we can't do anything.
1426 This can occur when flow deletes an insn that it has merged into an
1427 auto-increment address. */
1428 if (! INSN_P (insn))
1429 return 1;
1430
1431 /* Never combine loads and stores involving hard regs. The register
1432 allocator can usually handle such reg-reg moves by tying. If we allow
1433 the combiner to make substitutions of hard regs, we risk aborting in
1434 reload on machines that have SMALL_REGISTER_CLASSES.
1435 As an exception, we allow combinations involving fixed regs; these are
1436 not available to the register allocator so there's no risk involved. */
1437
1438 set = single_set (insn);
1439 if (! set)
1440 return 0;
1441 src = SET_SRC (set);
1442 dest = SET_DEST (set);
ad334b51
JH
1443 if (GET_CODE (src) == SUBREG)
1444 src = SUBREG_REG (src);
1445 if (GET_CODE (dest) == SUBREG)
1446 dest = SUBREG_REG (dest);
53895717
BS
1447 if (REG_P (src) && REG_P (dest)
1448 && ((REGNO (src) < FIRST_PSEUDO_REGISTER
1449 && ! fixed_regs[REGNO (src)])
1450 || (REGNO (dest) < FIRST_PSEUDO_REGISTER
1451 && ! fixed_regs[REGNO (dest)])))
c3410241 1452 return 1;
53895717 1453
c3410241
BS
1454 return 0;
1455}
1456
230d793d
RS
1457/* Try to combine the insns I1 and I2 into I3.
1458 Here I1 and I2 appear earlier than I3.
1459 I1 can be zero; then we combine just I2 into I3.
663522cb 1460
04956a1a 1461 If we are combining three insns and the resulting insn is not recognized,
230d793d
RS
1462 try splitting it into two insns. If that happens, I2 and I3 are retained
1463 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1464 are pseudo-deleted.
1465
663522cb 1466 Return 0 if the combination does not work. Then nothing is changed.
abe6e52f 1467 If we did the combination, return the insn at which combine should
663522cb
KH
1468 resume scanning.
1469
44a76fc8
AG
1470 Set NEW_DIRECT_JUMP_P to a non-zero value if try_combine creates a
1471 new direct jump instruction. */
230d793d
RS
1472
1473static rtx
44a76fc8 1474try_combine (i3, i2, i1, new_direct_jump_p)
230d793d 1475 register rtx i3, i2, i1;
44a76fc8 1476 register int *new_direct_jump_p;
230d793d 1477{
02359929 1478 /* New patterns for I3 and I2, respectively. */
230d793d
RS
1479 rtx newpat, newi2pat = 0;
1480 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1481 int added_sets_1, added_sets_2;
1482 /* Total number of SETs to put into I3. */
1483 int total_sets;
1484 /* Nonzero is I2's body now appears in I3. */
1485 int i2_is_used;
1486 /* INSN_CODEs for new I3, new I2, and user of condition code. */
6a651371 1487 int insn_code_number, i2_code_number = 0, other_code_number = 0;
230d793d
RS
1488 /* Contains I3 if the destination of I3 is used in its source, which means
1489 that the old life of I3 is being killed. If that usage is placed into
1490 I2 and not in I3, a REG_DEAD note must be made. */
1491 rtx i3dest_killed = 0;
1492 /* SET_DEST and SET_SRC of I2 and I1. */
1493 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1494 /* PATTERN (I2), or a copy of it in certain cases. */
1495 rtx i2pat;
1496 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
c4e861e8 1497 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
230d793d
RS
1498 int i1_feeds_i3 = 0;
1499 /* Notes that must be added to REG_NOTES in I3 and I2. */
1500 rtx new_i3_notes, new_i2_notes;
176c9e6b
JW
1501 /* Notes that we substituted I3 into I2 instead of the normal case. */
1502 int i3_subst_into_i2 = 0;
df7d75de
RK
1503 /* Notes that I1, I2 or I3 is a MULT operation. */
1504 int have_mult = 0;
230d793d
RS
1505
1506 int maxreg;
1507 rtx temp;
1508 register rtx link;
1509 int i;
1510
c3410241
BS
1511 /* Exit early if one of the insns involved can't be used for
1512 combinations. */
1513 if (cant_combine_insn_p (i3)
1514 || cant_combine_insn_p (i2)
1515 || (i1 && cant_combine_insn_p (i1))
1516 /* We also can't do anything if I3 has a
1517 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1518 libcall. */
ec35104c
JL
1519#if 0
1520 /* ??? This gives worse code, and appears to be unnecessary, since no
1521 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1522 || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1523#endif
663522cb 1524 )
230d793d
RS
1525 return 0;
1526
1527 combine_attempts++;
230d793d
RS
1528 undobuf.other_insn = 0;
1529
6e25d159
RK
1530 /* Reset the hard register usage information. */
1531 CLEAR_HARD_REG_SET (newpat_used_regs);
1532
230d793d
RS
1533 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1534 code below, set I1 to be the earlier of the two insns. */
1535 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1536 temp = i1, i1 = i2, i2 = temp;
1537
abe6e52f 1538 added_links_insn = 0;
137e889e 1539
230d793d 1540 /* First check for one important special-case that the code below will
c7be4f66 1541 not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
230d793d
RS
1542 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1543 we may be able to replace that destination with the destination of I3.
1544 This occurs in the common code where we compute both a quotient and
1545 remainder into a structure, in which case we want to do the computation
1546 directly into the structure to avoid register-register copies.
1547
c7be4f66
RK
1548 Note that this case handles both multiple sets in I2 and also
1549 cases where I2 has a number of CLOBBER or PARALLELs.
1550
230d793d
RS
1551 We make very conservative checks below and only try to handle the
1552 most common cases of this. For example, we only handle the case
1553 where I2 and I3 are adjacent to avoid making difficult register
1554 usage tests. */
1555
1556 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1557 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1558 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
230d793d
RS
1559 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1560 && GET_CODE (PATTERN (i2)) == PARALLEL
1561 && ! side_effects_p (SET_DEST (PATTERN (i3)))
5089e22e
RS
1562 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1563 below would need to check what is inside (and reg_overlap_mentioned_p
1564 doesn't support those codes anyway). Don't allow those destinations;
1565 the resulting insn isn't likely to be recognized anyway. */
1566 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1567 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
230d793d
RS
1568 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1569 SET_DEST (PATTERN (i3)))
1570 && next_real_insn (i2) == i3)
5089e22e
RS
1571 {
1572 rtx p2 = PATTERN (i2);
1573
1574 /* Make sure that the destination of I3,
1575 which we are going to substitute into one output of I2,
1576 is not used within another output of I2. We must avoid making this:
1577 (parallel [(set (mem (reg 69)) ...)
1578 (set (reg 69) ...)])
1579 which is not well-defined as to order of actions.
1580 (Besides, reload can't handle output reloads for this.)
1581
1582 The problem can also happen if the dest of I3 is a memory ref,
1583 if another dest in I2 is an indirect memory ref. */
1584 for (i = 0; i < XVECLEN (p2, 0); i++)
7ca919b7
RK
1585 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1586 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
5089e22e
RS
1587 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1588 SET_DEST (XVECEXP (p2, 0, i))))
1589 break;
230d793d 1590
5089e22e
RS
1591 if (i == XVECLEN (p2, 0))
1592 for (i = 0; i < XVECLEN (p2, 0); i++)
481c7efa
FS
1593 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1594 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1595 && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
5089e22e
RS
1596 {
1597 combine_merges++;
230d793d 1598
5089e22e
RS
1599 subst_insn = i3;
1600 subst_low_cuid = INSN_CUID (i2);
230d793d 1601
c4e861e8 1602 added_sets_2 = added_sets_1 = 0;
5089e22e 1603 i2dest = SET_SRC (PATTERN (i3));
230d793d 1604
5089e22e
RS
1605 /* Replace the dest in I2 with our dest and make the resulting
1606 insn the new pattern for I3. Then skip to where we
1607 validate the pattern. Everything was set up above. */
663522cb 1608 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
5089e22e
RS
1609 SET_DEST (PATTERN (i3)));
1610
1611 newpat = p2;
176c9e6b 1612 i3_subst_into_i2 = 1;
5089e22e
RS
1613 goto validate_replacement;
1614 }
1615 }
230d793d 1616
667c1c2c
RK
1617 /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1618 one of those words to another constant, merge them by making a new
1619 constant. */
1620 if (i1 == 0
1621 && (temp = single_set (i2)) != 0
1622 && (GET_CODE (SET_SRC (temp)) == CONST_INT
1623 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1624 && GET_CODE (SET_DEST (temp)) == REG
1625 && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1626 && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1627 && GET_CODE (PATTERN (i3)) == SET
1628 && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1629 && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1630 && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1631 && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1632 && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1633 {
1634 HOST_WIDE_INT lo, hi;
1635
1636 if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1637 lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1638 else
1639 {
1640 lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1641 hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1642 }
1643
1644 if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
48b4d901
AO
1645 {
1646 /* We don't handle the case of the target word being wider
1647 than a host wide int. */
1648 if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
1649 abort ();
1650
42a6ff51 1651 lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
48b4d901
AO
1652 lo |= INTVAL (SET_SRC (PATTERN (i3)));
1653 }
1654 else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
667c1c2c 1655 hi = INTVAL (SET_SRC (PATTERN (i3)));
48b4d901
AO
1656 else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1657 {
1658 int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1659 >> (HOST_BITS_PER_WIDE_INT - 1));
1660
42a6ff51
AO
1661 lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1662 (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1663 lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1664 (INTVAL (SET_SRC (PATTERN (i3)))));
48b4d901
AO
1665 if (hi == sign)
1666 hi = lo < 0 ? -1 : 0;
1667 }
1668 else
1669 /* We don't handle the case of the higher word not fitting
1670 entirely in either hi or lo. */
1671 abort ();
667c1c2c
RK
1672
1673 combine_merges++;
1674 subst_insn = i3;
1675 subst_low_cuid = INSN_CUID (i2);
1676 added_sets_2 = added_sets_1 = 0;
1677 i2dest = SET_DEST (temp);
1678
1679 SUBST (SET_SRC (temp),
1680 immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1681
1682 newpat = PATTERN (i2);
667c1c2c
RK
1683 goto validate_replacement;
1684 }
1685
230d793d
RS
1686#ifndef HAVE_cc0
1687 /* If we have no I1 and I2 looks like:
1688 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1689 (set Y OP)])
1690 make up a dummy I1 that is
1691 (set Y OP)
1692 and change I2 to be
1693 (set (reg:CC X) (compare:CC Y (const_int 0)))
1694
1695 (We can ignore any trailing CLOBBERs.)
1696
1697 This undoes a previous combination and allows us to match a branch-and-
1698 decrement insn. */
1699
1700 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1701 && XVECLEN (PATTERN (i2), 0) >= 2
1702 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1703 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1704 == MODE_CC)
1705 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1706 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1707 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1708 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1709 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1710 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1711 {
663522cb 1712 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
230d793d
RS
1713 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1714 break;
1715
1716 if (i == 1)
1717 {
1718 /* We make I1 with the same INSN_UID as I2. This gives it
1719 the same INSN_CUID for value tracking. Our fake I1 will
1720 never appear in the insn stream so giving it the same INSN_UID
1721 as I2 will not cause a problem. */
1722
0d9641d1 1723 subst_prev_insn = i1
38a448ca
RH
1724 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1725 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1726 NULL_RTX);
230d793d
RS
1727
1728 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1729 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1730 SET_DEST (PATTERN (i1)));
1731 }
1732 }
1733#endif
1734
1735 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1736 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1737 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1738 {
1739 undo_all ();
1740 return 0;
1741 }
1742
1743 /* Record whether I2DEST is used in I2SRC and similarly for the other
1744 cases. Knowing this will help in register status updating below. */
1745 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1746 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1747 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1748
916f14f1 1749 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1750 in I2SRC. */
1751 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1752
1753 /* Ensure that I3's pattern can be the destination of combines. */
1754 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1755 i1 && i2dest_in_i1src && i1_feeds_i3,
1756 &i3dest_killed))
1757 {
1758 undo_all ();
1759 return 0;
1760 }
1761
df7d75de
RK
1762 /* See if any of the insns is a MULT operation. Unless one is, we will
1763 reject a combination that is, since it must be slower. Be conservative
1764 here. */
1765 if (GET_CODE (i2src) == MULT
1766 || (i1 != 0 && GET_CODE (i1src) == MULT)
1767 || (GET_CODE (PATTERN (i3)) == SET
1768 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1769 have_mult = 1;
1770
230d793d
RS
1771 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1772 We used to do this EXCEPT in one case: I3 has a post-inc in an
1773 output operand. However, that exception can give rise to insns like
1774 mov r3,(r3)+
1775 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1776 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1777
1778#if 0
1779 if (!(GET_CODE (PATTERN (i3)) == SET
1780 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1781 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1782 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1783 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1784 /* It's not the exception. */
1785#endif
1786#ifdef AUTO_INC_DEC
1787 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1788 if (REG_NOTE_KIND (link) == REG_INC
1789 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1790 || (i1 != 0
1791 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1792 {
1793 undo_all ();
1794 return 0;
1795 }
1796#endif
1797
1798 /* See if the SETs in I1 or I2 need to be kept around in the merged
1799 instruction: whenever the value set there is still needed past I3.
1800 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1801
1802 For the SET in I1, we have two cases: If I1 and I2 independently
1803 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1804 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1805 in I1 needs to be kept around unless I1DEST dies or is set in either
1806 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1807 I1DEST. If so, we know I1 feeds into I2. */
1808
1809 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1810
1811 added_sets_1
1812 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1813 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1814
1815 /* If the set in I2 needs to be kept around, we must make a copy of
1816 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
5089e22e 1817 PATTERN (I2), we are only substituting for the original I1DEST, not into
230d793d
RS
1818 an already-substituted copy. This also prevents making self-referential
1819 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1820 I2DEST. */
1821
1822 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
38a448ca 1823 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
230d793d
RS
1824 : PATTERN (i2));
1825
1826 if (added_sets_2)
1827 i2pat = copy_rtx (i2pat);
1828
1829 combine_merges++;
1830
1831 /* Substitute in the latest insn for the regs set by the earlier ones. */
1832
1833 maxreg = max_reg_num ();
1834
1835 subst_insn = i3;
230d793d
RS
1836
1837 /* It is possible that the source of I2 or I1 may be performing an
1838 unneeded operation, such as a ZERO_EXTEND of something that is known
1839 to have the high part zero. Handle that case by letting subst look at
1840 the innermost one of them.
1841
1842 Another way to do this would be to have a function that tries to
1843 simplify a single insn instead of merging two or more insns. We don't
1844 do this because of the potential of infinite loops and because
1845 of the potential extra memory required. However, doing it the way
1846 we are is a bit of a kludge and doesn't catch all cases.
1847
1848 But only do this if -fexpensive-optimizations since it slows things down
1849 and doesn't usually win. */
1850
1851 if (flag_expensive_optimizations)
1852 {
1853 /* Pass pc_rtx so no substitutions are done, just simplifications.
1854 The cases that we are interested in here do not involve the few
1855 cases were is_replaced is checked. */
1856 if (i1)
d0ab8cd3
RK
1857 {
1858 subst_low_cuid = INSN_CUID (i1);
1859 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1860 }
230d793d 1861 else
d0ab8cd3
RK
1862 {
1863 subst_low_cuid = INSN_CUID (i2);
1864 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1865 }
230d793d
RS
1866 }
1867
1868#ifndef HAVE_cc0
1869 /* Many machines that don't use CC0 have insns that can both perform an
1870 arithmetic operation and set the condition code. These operations will
1871 be represented as a PARALLEL with the first element of the vector
1872 being a COMPARE of an arithmetic operation with the constant zero.
1873 The second element of the vector will set some pseudo to the result
1874 of the same arithmetic operation. If we simplify the COMPARE, we won't
1875 match such a pattern and so will generate an extra insn. Here we test
1876 for this case, where both the comparison and the operation result are
1877 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1878 I2SRC. Later we will make the PARALLEL that contains I2. */
1879
1880 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1881 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1882 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1883 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1884 {
081f5e7e 1885#ifdef EXTRA_CC_MODES
230d793d
RS
1886 rtx *cc_use;
1887 enum machine_mode compare_mode;
081f5e7e 1888#endif
230d793d
RS
1889
1890 newpat = PATTERN (i3);
1891 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1892
1893 i2_is_used = 1;
1894
1895#ifdef EXTRA_CC_MODES
1896 /* See if a COMPARE with the operand we substituted in should be done
1897 with the mode that is currently being used. If not, do the same
1898 processing we do in `subst' for a SET; namely, if the destination
1899 is used only once, try to replace it with a register of the proper
1900 mode and also replace the COMPARE. */
1901 if (undobuf.other_insn == 0
1902 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1903 &undobuf.other_insn))
77fa0940
RK
1904 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1905 i2src, const0_rtx))
230d793d
RS
1906 != GET_MODE (SET_DEST (newpat))))
1907 {
770ae6cc 1908 unsigned int regno = REGNO (SET_DEST (newpat));
38a448ca 1909 rtx new_dest = gen_rtx_REG (compare_mode, regno);
230d793d
RS
1910
1911 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 1912 || (REG_N_SETS (regno) == 1 && ! added_sets_2
230d793d
RS
1913 && ! REG_USERVAR_P (SET_DEST (newpat))))
1914 {
1915 if (regno >= FIRST_PSEUDO_REGISTER)
1916 SUBST (regno_reg_rtx[regno], new_dest);
1917
1918 SUBST (SET_DEST (newpat), new_dest);
1919 SUBST (XEXP (*cc_use, 0), new_dest);
1920 SUBST (SET_SRC (newpat),
f1c6ba8b 1921 gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
230d793d
RS
1922 }
1923 else
1924 undobuf.other_insn = 0;
1925 }
663522cb 1926#endif
230d793d
RS
1927 }
1928 else
1929#endif
1930 {
1931 n_occurrences = 0; /* `subst' counts here */
1932
1933 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1934 need to make a unique copy of I2SRC each time we substitute it
1935 to avoid self-referential rtl. */
1936
d0ab8cd3 1937 subst_low_cuid = INSN_CUID (i2);
230d793d
RS
1938 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1939 ! i1_feeds_i3 && i1dest_in_i1src);
230d793d
RS
1940
1941 /* Record whether i2's body now appears within i3's body. */
1942 i2_is_used = n_occurrences;
1943 }
1944
1945 /* If we already got a failure, don't try to do more. Otherwise,
1946 try to substitute in I1 if we have it. */
1947
1948 if (i1 && GET_CODE (newpat) != CLOBBER)
1949 {
1950 /* Before we can do this substitution, we must redo the test done
1951 above (see detailed comments there) that ensures that I1DEST
0f41302f 1952 isn't mentioned in any SETs in NEWPAT that are field assignments. */
230d793d 1953
5f4f0e22 1954 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
6496a589 1955 0, (rtx*)0))
230d793d
RS
1956 {
1957 undo_all ();
1958 return 0;
1959 }
1960
1961 n_occurrences = 0;
d0ab8cd3 1962 subst_low_cuid = INSN_CUID (i1);
230d793d 1963 newpat = subst (newpat, i1dest, i1src, 0, 0);
230d793d
RS
1964 }
1965
916f14f1
RK
1966 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1967 to count all the ways that I2SRC and I1SRC can be used. */
5f4f0e22 1968 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1969 && i2_is_used + added_sets_2 > 1)
5f4f0e22 1970 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
916f14f1
RK
1971 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1972 > 1))
230d793d
RS
1973 /* Fail if we tried to make a new register (we used to abort, but there's
1974 really no reason to). */
1975 || max_reg_num () != maxreg
1976 /* Fail if we couldn't do something and have a CLOBBER. */
df7d75de
RK
1977 || GET_CODE (newpat) == CLOBBER
1978 /* Fail if this new pattern is a MULT and we didn't have one before
1979 at the outer level. */
1980 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1981 && ! have_mult))
230d793d
RS
1982 {
1983 undo_all ();
1984 return 0;
1985 }
1986
1987 /* If the actions of the earlier insns must be kept
1988 in addition to substituting them into the latest one,
1989 we must make a new PARALLEL for the latest insn
1990 to hold additional the SETs. */
1991
1992 if (added_sets_1 || added_sets_2)
1993 {
1994 combine_extras++;
1995
1996 if (GET_CODE (newpat) == PARALLEL)
1997 {
1998 rtvec old = XVEC (newpat, 0);
1999 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
38a448ca 2000 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
d38a30c9
KG
2001 memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
2002 sizeof (old->elem[0]) * old->num_elem);
230d793d
RS
2003 }
2004 else
2005 {
2006 rtx old = newpat;
2007 total_sets = 1 + added_sets_1 + added_sets_2;
38a448ca 2008 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
230d793d
RS
2009 XVECEXP (newpat, 0, 0) = old;
2010 }
2011
2012 if (added_sets_1)
2013 XVECEXP (newpat, 0, --total_sets)
2014 = (GET_CODE (PATTERN (i1)) == PARALLEL
38a448ca 2015 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
230d793d
RS
2016
2017 if (added_sets_2)
c5c76735
JL
2018 {
2019 /* If there is no I1, use I2's body as is. We used to also not do
2020 the subst call below if I2 was substituted into I3,
2021 but that could lose a simplification. */
2022 if (i1 == 0)
2023 XVECEXP (newpat, 0, --total_sets) = i2pat;
2024 else
2025 /* See comment where i2pat is assigned. */
2026 XVECEXP (newpat, 0, --total_sets)
2027 = subst (i2pat, i1dest, i1src, 0, 0);
2028 }
230d793d
RS
2029 }
2030
2031 /* We come here when we are replacing a destination in I2 with the
2032 destination of I3. */
2033 validate_replacement:
2034
6e25d159
RK
2035 /* Note which hard regs this insn has as inputs. */
2036 mark_used_regs_combine (newpat);
2037
230d793d 2038 /* Is the result of combination a valid instruction? */
8e2f6e35 2039 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2040
2041 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2042 the second SET's destination is a register that is unused. In that case,
2043 we just need the first SET. This can occur when simplifying a divmod
2044 insn. We *must* test for this case here because the code below that
2045 splits two independent SETs doesn't handle this case correctly when it
2046 updates the register status. Also check the case where the first
2047 SET's destination is unused. That would not cause incorrect code, but
2048 does cause an unneeded insn to remain. */
2049
2050 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2051 && XVECLEN (newpat, 0) == 2
2052 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2053 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2054 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
2055 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
2056 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
2057 && asm_noperands (newpat) < 0)
2058 {
2059 newpat = XVECEXP (newpat, 0, 0);
8e2f6e35 2060 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2061 }
2062
2063 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2064 && XVECLEN (newpat, 0) == 2
2065 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2066 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2067 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
2068 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
2069 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
2070 && asm_noperands (newpat) < 0)
2071 {
2072 newpat = XVECEXP (newpat, 0, 1);
8e2f6e35 2073 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2074 }
2075
2076 /* If we were combining three insns and the result is a simple SET
2077 with no ASM_OPERANDS that wasn't recognized, try to split it into two
663522cb 2078 insns. There are two ways to do this. It can be split using a
916f14f1
RK
2079 machine-specific method (like when you have an addition of a large
2080 constant) or by combine in the function find_split_point. */
2081
230d793d
RS
2082 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2083 && asm_noperands (newpat) < 0)
2084 {
916f14f1 2085 rtx m_split, *split;
42495ca0 2086 rtx ni2dest = i2dest;
916f14f1
RK
2087
2088 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
2089 use I2DEST as a scratch register will help. In the latter case,
2090 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1
RK
2091
2092 m_split = split_insns (newpat, i3);
a70c61d9
JW
2093
2094 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2095 inputs of NEWPAT. */
2096
2097 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2098 possible to try that as a scratch reg. This would require adding
2099 more code to make it work though. */
2100
2101 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
42495ca0
RK
2102 {
2103 /* If I2DEST is a hard register or the only use of a pseudo,
2104 we can change its mode. */
2105 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 2106 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 2107 && GET_CODE (i2dest) == REG
42495ca0 2108 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 2109 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
42495ca0 2110 && ! REG_USERVAR_P (i2dest))))
38a448ca 2111 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
c5c76735
JL
2112 REGNO (i2dest));
2113
2114 m_split = split_insns (gen_rtx_PARALLEL
2115 (VOIDmode,
2116 gen_rtvec (2, newpat,
2117 gen_rtx_CLOBBER (VOIDmode,
2118 ni2dest))),
2119 i3);
5dd3e650
R
2120 /* If the split with the mode-changed register didn't work, try
2121 the original register. */
2122 if (! m_split && ni2dest != i2dest)
c7ca5912
RK
2123 {
2124 ni2dest = i2dest;
2125 m_split = split_insns (gen_rtx_PARALLEL
2126 (VOIDmode,
2127 gen_rtvec (2, newpat,
2128 gen_rtx_CLOBBER (VOIDmode,
2129 i2dest))),
2130 i3);
2131 }
42495ca0 2132 }
916f14f1 2133
d340408c
RH
2134 if (m_split && GET_CODE (m_split) != SEQUENCE)
2135 {
2136 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2137 if (insn_code_number >= 0)
2138 newpat = m_split;
2139 }
2140 else if (m_split && GET_CODE (m_split) == SEQUENCE
2141 && XVECLEN (m_split, 0) == 2
2142 && (next_real_insn (i2) == i3
2143 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
2144 INSN_CUID (i2))))
916f14f1 2145 {
1a26b032 2146 rtx i2set, i3set;
d0ab8cd3 2147 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 2148 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 2149
e4ba89be
RK
2150 i3set = single_set (XVECEXP (m_split, 0, 1));
2151 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 2152
42495ca0
RK
2153 /* In case we changed the mode of I2DEST, replace it in the
2154 pseudo-register table here. We can't do it above in case this
2155 code doesn't get executed and we do a split the other way. */
2156
2157 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2158 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2159
8e2f6e35 2160 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1a26b032
RK
2161
2162 /* If I2 or I3 has multiple SETs, we won't know how to track
9cc96794
RK
2163 register status, so don't use these insns. If I2's destination
2164 is used between I2 and I3, we also can't use these insns. */
1a26b032 2165
9cc96794
RK
2166 if (i2_code_number >= 0 && i2set && i3set
2167 && (next_real_insn (i2) == i3
2168 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
8e2f6e35
BS
2169 insn_code_number = recog_for_combine (&newi3pat, i3,
2170 &new_i3_notes);
d0ab8cd3
RK
2171 if (insn_code_number >= 0)
2172 newpat = newi3pat;
2173
c767f54b 2174 /* It is possible that both insns now set the destination of I3.
22609cbf 2175 If so, we must show an extra use of it. */
c767f54b 2176
393de53f
RK
2177 if (insn_code_number >= 0)
2178 {
2179 rtx new_i3_dest = SET_DEST (i3set);
2180 rtx new_i2_dest = SET_DEST (i2set);
2181
2182 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2183 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2184 || GET_CODE (new_i3_dest) == SUBREG)
2185 new_i3_dest = XEXP (new_i3_dest, 0);
2186
d4096689
RK
2187 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2188 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2189 || GET_CODE (new_i2_dest) == SUBREG)
2190 new_i2_dest = XEXP (new_i2_dest, 0);
2191
393de53f
RK
2192 if (GET_CODE (new_i3_dest) == REG
2193 && GET_CODE (new_i2_dest) == REG
2194 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
b1f21e0a 2195 REG_N_SETS (REGNO (new_i2_dest))++;
393de53f 2196 }
916f14f1 2197 }
230d793d
RS
2198
2199 /* If we can split it and use I2DEST, go ahead and see if that
2200 helps things be recognized. Verify that none of the registers
2201 are set between I2 and I3. */
d0ab8cd3 2202 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d
RS
2203#ifdef HAVE_cc0
2204 && GET_CODE (i2dest) == REG
2205#endif
2206 /* We need I2DEST in the proper mode. If it is a hard register
2207 or the only use of a pseudo, we can change its mode. */
2208 && (GET_MODE (*split) == GET_MODE (i2dest)
2209 || GET_MODE (*split) == VOIDmode
2210 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 2211 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
230d793d
RS
2212 && ! REG_USERVAR_P (i2dest)))
2213 && (next_real_insn (i2) == i3
2214 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2215 /* We can't overwrite I2DEST if its value is still used by
2216 NEWPAT. */
2217 && ! reg_referenced_p (i2dest, newpat))
2218 {
2219 rtx newdest = i2dest;
df7d75de
RK
2220 enum rtx_code split_code = GET_CODE (*split);
2221 enum machine_mode split_mode = GET_MODE (*split);
230d793d
RS
2222
2223 /* Get NEWDEST as a register in the proper mode. We have already
2224 validated that we can do this. */
df7d75de 2225 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
230d793d 2226 {
38a448ca 2227 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
230d793d
RS
2228
2229 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2230 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2231 }
2232
2233 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2234 an ASHIFT. This can occur if it was inside a PLUS and hence
2235 appeared to be a memory address. This is a kludge. */
df7d75de 2236 if (split_code == MULT
230d793d 2237 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1568d79b 2238 && INTVAL (XEXP (*split, 1)) > 0
230d793d 2239 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1dc8a823 2240 {
f1c6ba8b
RK
2241 SUBST (*split, gen_rtx_ASHIFT (split_mode,
2242 XEXP (*split, 0), GEN_INT (i)));
1dc8a823
JW
2243 /* Update split_code because we may not have a multiply
2244 anymore. */
2245 split_code = GET_CODE (*split);
2246 }
230d793d
RS
2247
2248#ifdef INSN_SCHEDULING
2249 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2250 be written as a ZERO_EXTEND. */
df7d75de 2251 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
f1c6ba8b 2252 SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
ddef6bc7 2253 SUBREG_REG (*split)));
230d793d
RS
2254#endif
2255
f1c6ba8b 2256 newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
230d793d 2257 SUBST (*split, newdest);
8e2f6e35 2258 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
df7d75de
RK
2259
2260 /* If the split point was a MULT and we didn't have one before,
2261 don't use one now. */
2262 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
8e2f6e35 2263 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2264 }
2265 }
2266
2267 /* Check for a case where we loaded from memory in a narrow mode and
2268 then sign extended it, but we need both registers. In that case,
2269 we have a PARALLEL with both loads from the same memory location.
2270 We can split this into a load from memory followed by a register-register
2271 copy. This saves at least one insn, more if register allocation can
f0343c74
RK
2272 eliminate the copy.
2273
2274 We cannot do this if the destination of the second assignment is
2275 a register that we have already assumed is zero-extended. Similarly
2276 for a SUBREG of such a register. */
230d793d
RS
2277
2278 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2279 && GET_CODE (newpat) == PARALLEL
2280 && XVECLEN (newpat, 0) == 2
2281 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2282 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2283 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2284 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2285 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2286 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2287 INSN_CUID (i2))
2288 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2289 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
f0343c74
RK
2290 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2291 (GET_CODE (temp) == REG
2292 && reg_nonzero_bits[REGNO (temp)] != 0
2293 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2294 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2295 && (reg_nonzero_bits[REGNO (temp)]
2296 != GET_MODE_MASK (word_mode))))
2297 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2298 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2299 (GET_CODE (temp) == REG
2300 && reg_nonzero_bits[REGNO (temp)] != 0
2301 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2302 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2303 && (reg_nonzero_bits[REGNO (temp)]
2304 != GET_MODE_MASK (word_mode)))))
230d793d
RS
2305 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2306 SET_SRC (XVECEXP (newpat, 0, 1)))
2307 && ! find_reg_note (i3, REG_UNUSED,
2308 SET_DEST (XVECEXP (newpat, 0, 0))))
2309 {
472fbdd1
RK
2310 rtx ni2dest;
2311
230d793d 2312 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 2313 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
2314 newpat = XVECEXP (newpat, 0, 1);
2315 SUBST (SET_SRC (newpat),
472fbdd1 2316 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
8e2f6e35 2317 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 2318
230d793d 2319 if (i2_code_number >= 0)
8e2f6e35 2320 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
5089e22e
RS
2321
2322 if (insn_code_number >= 0)
2323 {
2324 rtx insn;
2325 rtx link;
2326
2327 /* If we will be able to accept this, we have made a change to the
2328 destination of I3. This can invalidate a LOG_LINKS pointing
2329 to I3. No other part of combine.c makes such a transformation.
2330
2331 The new I3 will have a destination that was previously the
2332 destination of I1 or I2 and which was used in i2 or I3. Call
2333 distribute_links to make a LOG_LINK from the next use of
2334 that destination. */
2335
2336 PATTERN (i3) = newpat;
38a448ca 2337 distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
5089e22e
RS
2338
2339 /* I3 now uses what used to be its destination and which is
2340 now I2's destination. That means we need a LOG_LINK from
2341 I3 to I2. But we used to have one, so we still will.
2342
2343 However, some later insn might be using I2's dest and have
2344 a LOG_LINK pointing at I3. We must remove this link.
2345 The simplest way to remove the link is to point it at I1,
2346 which we know will be a NOTE. */
2347
2348 for (insn = NEXT_INSN (i3);
0d4d42c3 2349 insn && (this_basic_block == n_basic_blocks - 1
3b413743 2350 || insn != BLOCK_HEAD (this_basic_block + 1));
5089e22e
RS
2351 insn = NEXT_INSN (insn))
2352 {
2c3c49de 2353 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
5089e22e
RS
2354 {
2355 for (link = LOG_LINKS (insn); link;
2356 link = XEXP (link, 1))
2357 if (XEXP (link, 0) == i3)
2358 XEXP (link, 0) = i1;
2359
2360 break;
2361 }
2362 }
2363 }
230d793d 2364 }
663522cb 2365
230d793d
RS
2366 /* Similarly, check for a case where we have a PARALLEL of two independent
2367 SETs but we started with three insns. In this case, we can do the sets
2368 as two separate insns. This case occurs when some SET allows two
2369 other insns to combine, but the destination of that SET is still live. */
2370
2371 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2372 && GET_CODE (newpat) == PARALLEL
2373 && XVECLEN (newpat, 0) == 2
2374 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2375 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2376 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2377 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2378 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2379 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2380 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2381 INSN_CUID (i2))
2382 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2383 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2384 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2385 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2386 XVECEXP (newpat, 0, 0))
2387 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
14a774a9
RK
2388 XVECEXP (newpat, 0, 1))
2389 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2390 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
230d793d 2391 {
e9a25f70
JL
2392 /* Normally, it doesn't matter which of the two is done first,
2393 but it does if one references cc0. In that case, it has to
2394 be first. */
2395#ifdef HAVE_cc0
2396 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2397 {
2398 newi2pat = XVECEXP (newpat, 0, 0);
2399 newpat = XVECEXP (newpat, 0, 1);
2400 }
2401 else
2402#endif
2403 {
2404 newi2pat = XVECEXP (newpat, 0, 1);
2405 newpat = XVECEXP (newpat, 0, 0);
2406 }
230d793d 2407
8e2f6e35 2408 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 2409
230d793d 2410 if (i2_code_number >= 0)
8e2f6e35 2411 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2412 }
2413
2414 /* If it still isn't recognized, fail and change things back the way they
2415 were. */
2416 if ((insn_code_number < 0
2417 /* Is the result a reasonable ASM_OPERANDS? */
2418 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2419 {
2420 undo_all ();
2421 return 0;
2422 }
2423
2424 /* If we had to change another insn, make sure it is valid also. */
2425 if (undobuf.other_insn)
2426 {
230d793d
RS
2427 rtx other_pat = PATTERN (undobuf.other_insn);
2428 rtx new_other_notes;
2429 rtx note, next;
2430
6e25d159
RK
2431 CLEAR_HARD_REG_SET (newpat_used_regs);
2432
8e2f6e35
BS
2433 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2434 &new_other_notes);
230d793d
RS
2435
2436 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2437 {
2438 undo_all ();
2439 return 0;
2440 }
2441
2442 PATTERN (undobuf.other_insn) = other_pat;
2443
2444 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2445 are still valid. Then add any non-duplicate notes added by
2446 recog_for_combine. */
2447 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2448 {
2449 next = XEXP (note, 1);
2450
2451 if (REG_NOTE_KIND (note) == REG_UNUSED
2452 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1a26b032
RK
2453 {
2454 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2455 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
1a26b032
RK
2456
2457 remove_note (undobuf.other_insn, note);
2458 }
230d793d
RS
2459 }
2460
1a26b032
RK
2461 for (note = new_other_notes; note; note = XEXP (note, 1))
2462 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2463 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
1a26b032 2464
230d793d 2465 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 2466 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d 2467 }
5ef17dd2 2468#ifdef HAVE_cc0
663522cb 2469 /* If I2 is the setter CC0 and I3 is the user CC0 then check whether
5ef17dd2
CC
2470 they are adjacent to each other or not. */
2471 {
2472 rtx p = prev_nonnote_insn (i3);
663522cb
KH
2473 if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2474 && sets_cc0_p (newi2pat))
5ef17dd2 2475 {
663522cb
KH
2476 undo_all ();
2477 return 0;
5ef17dd2 2478 }
663522cb
KH
2479 }
2480#endif
230d793d 2481
663522cb 2482 /* We now know that we can do this combination. Merge the insns and
230d793d
RS
2483 update the status of registers and LOG_LINKS. */
2484
2485 {
2486 rtx i3notes, i2notes, i1notes = 0;
2487 rtx i3links, i2links, i1links = 0;
2488 rtx midnotes = 0;
770ae6cc 2489 unsigned int regno;
ff3467a9
JW
2490 /* Compute which registers we expect to eliminate. newi2pat may be setting
2491 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2492 same as i3dest, in which case newi2pat may be setting i1dest. */
2493 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2494 || i2dest_in_i2src || i2dest_in_i1src
230d793d 2495 ? 0 : i2dest);
ff3467a9
JW
2496 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2497 || (newi2pat && reg_set_p (i1dest, newi2pat))
2498 ? 0 : i1dest);
230d793d
RS
2499
2500 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2501 clear them. */
2502 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2503 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2504 if (i1)
2505 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2506
2507 /* Ensure that we do not have something that should not be shared but
2508 occurs multiple times in the new insns. Check this by first
5089e22e 2509 resetting all the `used' flags and then copying anything is shared. */
230d793d
RS
2510
2511 reset_used_flags (i3notes);
2512 reset_used_flags (i2notes);
2513 reset_used_flags (i1notes);
2514 reset_used_flags (newpat);
2515 reset_used_flags (newi2pat);
2516 if (undobuf.other_insn)
2517 reset_used_flags (PATTERN (undobuf.other_insn));
2518
2519 i3notes = copy_rtx_if_shared (i3notes);
2520 i2notes = copy_rtx_if_shared (i2notes);
2521 i1notes = copy_rtx_if_shared (i1notes);
2522 newpat = copy_rtx_if_shared (newpat);
2523 newi2pat = copy_rtx_if_shared (newi2pat);
2524 if (undobuf.other_insn)
2525 reset_used_flags (PATTERN (undobuf.other_insn));
2526
2527 INSN_CODE (i3) = insn_code_number;
2528 PATTERN (i3) = newpat;
2529 if (undobuf.other_insn)
2530 INSN_CODE (undobuf.other_insn) = other_code_number;
2531
2532 /* We had one special case above where I2 had more than one set and
2533 we replaced a destination of one of those sets with the destination
2534 of I3. In that case, we have to update LOG_LINKS of insns later
176c9e6b
JW
2535 in this basic block. Note that this (expensive) case is rare.
2536
2537 Also, in this case, we must pretend that all REG_NOTEs for I2
2538 actually came from I3, so that REG_UNUSED notes from I2 will be
2539 properly handled. */
2540
c7be4f66 2541 if (i3_subst_into_i2)
176c9e6b 2542 {
1786009e 2543 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
95ac07b0
AO
2544 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2545 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1786009e
ZW
2546 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2547 && ! find_reg_note (i2, REG_UNUSED,
2548 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2549 for (temp = NEXT_INSN (i2);
2550 temp && (this_basic_block == n_basic_blocks - 1
2551 || BLOCK_HEAD (this_basic_block) != temp);
2552 temp = NEXT_INSN (temp))
2553 if (temp != i3 && INSN_P (temp))
2554 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2555 if (XEXP (link, 0) == i2)
2556 XEXP (link, 0) = i3;
176c9e6b
JW
2557
2558 if (i3notes)
2559 {
2560 rtx link = i3notes;
2561 while (XEXP (link, 1))
2562 link = XEXP (link, 1);
2563 XEXP (link, 1) = i2notes;
2564 }
2565 else
2566 i3notes = i2notes;
2567 i2notes = 0;
2568 }
230d793d
RS
2569
2570 LOG_LINKS (i3) = 0;
2571 REG_NOTES (i3) = 0;
2572 LOG_LINKS (i2) = 0;
2573 REG_NOTES (i2) = 0;
2574
2575 if (newi2pat)
2576 {
2577 INSN_CODE (i2) = i2_code_number;
2578 PATTERN (i2) = newi2pat;
2579 }
2580 else
2581 {
2582 PUT_CODE (i2, NOTE);
2583 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2584 NOTE_SOURCE_FILE (i2) = 0;
2585 }
2586
2587 if (i1)
2588 {
2589 LOG_LINKS (i1) = 0;
2590 REG_NOTES (i1) = 0;
2591 PUT_CODE (i1, NOTE);
2592 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2593 NOTE_SOURCE_FILE (i1) = 0;
2594 }
2595
2596 /* Get death notes for everything that is now used in either I3 or
663522cb 2597 I2 and used to die in a previous insn. If we built two new
6eb12cef
RK
2598 patterns, move from I1 to I2 then I2 to I3 so that we get the
2599 proper movement on registers that I2 modifies. */
230d793d 2600
230d793d 2601 if (newi2pat)
6eb12cef
RK
2602 {
2603 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2604 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2605 }
2606 else
2607 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2608 i3, &midnotes);
230d793d
RS
2609
2610 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2611 if (i3notes)
5f4f0e22
CH
2612 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2613 elim_i2, elim_i1);
230d793d 2614 if (i2notes)
5f4f0e22
CH
2615 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2616 elim_i2, elim_i1);
230d793d 2617 if (i1notes)
5f4f0e22
CH
2618 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2619 elim_i2, elim_i1);
230d793d 2620 if (midnotes)
5f4f0e22
CH
2621 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2622 elim_i2, elim_i1);
230d793d
RS
2623
2624 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2625 know these are REG_UNUSED and want them to go to the desired insn,
663522cb 2626 so we always pass it as i3. We have not counted the notes in
1a26b032
RK
2627 reg_n_deaths yet, so we need to do so now. */
2628
230d793d 2629 if (newi2pat && new_i2_notes)
1a26b032
RK
2630 {
2631 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2632 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2633 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
663522cb 2634
1a26b032
RK
2635 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2636 }
2637
230d793d 2638 if (new_i3_notes)
1a26b032
RK
2639 {
2640 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2641 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2642 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
663522cb 2643
1a26b032
RK
2644 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2645 }
230d793d
RS
2646
2647 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
e9a25f70
JL
2648 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2649 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2650 in that case, it might delete I2. Similarly for I2 and I1.
1a26b032
RK
2651 Show an additional death due to the REG_DEAD note we make here. If
2652 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 2653
230d793d 2654 if (i3dest_killed)
1a26b032
RK
2655 {
2656 if (GET_CODE (i3dest_killed) == REG)
b1f21e0a 2657 REG_N_DEATHS (REGNO (i3dest_killed))++;
1a26b032 2658
e9a25f70 2659 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
38a448ca
RH
2660 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2661 NULL_RTX),
ff3467a9 2662 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
e9a25f70 2663 else
38a448ca
RH
2664 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2665 NULL_RTX),
e9a25f70 2666 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
ff3467a9 2667 elim_i2, elim_i1);
1a26b032 2668 }
58c8c593 2669
230d793d 2670 if (i2dest_in_i2src)
58c8c593 2671 {
1a26b032 2672 if (GET_CODE (i2dest) == REG)
b1f21e0a 2673 REG_N_DEATHS (REGNO (i2dest))++;
1a26b032 2674
58c8c593 2675 if (newi2pat && reg_set_p (i2dest, newi2pat))
38a448ca 2676 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2677 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2678 else
38a448ca 2679 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2680 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2681 NULL_RTX, NULL_RTX);
2682 }
2683
230d793d 2684 if (i1dest_in_i1src)
58c8c593 2685 {
1a26b032 2686 if (GET_CODE (i1dest) == REG)
b1f21e0a 2687 REG_N_DEATHS (REGNO (i1dest))++;
1a26b032 2688
58c8c593 2689 if (newi2pat && reg_set_p (i1dest, newi2pat))
38a448ca 2690 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2691 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2692 else
38a448ca 2693 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2694 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2695 NULL_RTX, NULL_RTX);
2696 }
230d793d
RS
2697
2698 distribute_links (i3links);
2699 distribute_links (i2links);
2700 distribute_links (i1links);
2701
2702 if (GET_CODE (i2dest) == REG)
2703 {
d0ab8cd3
RK
2704 rtx link;
2705 rtx i2_insn = 0, i2_val = 0, set;
2706
2707 /* The insn that used to set this register doesn't exist, and
2708 this life of the register may not exist either. See if one of
663522cb 2709 I3's links points to an insn that sets I2DEST. If it does,
d0ab8cd3
RK
2710 that is now the last known value for I2DEST. If we don't update
2711 this and I2 set the register to a value that depended on its old
230d793d
RS
2712 contents, we will get confused. If this insn is used, thing
2713 will be set correctly in combine_instructions. */
d0ab8cd3
RK
2714
2715 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2716 if ((set = single_set (XEXP (link, 0))) != 0
2717 && rtx_equal_p (i2dest, SET_DEST (set)))
2718 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2719
2720 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
2721
2722 /* If the reg formerly set in I2 died only once and that was in I3,
2723 zero its use count so it won't make `reload' do any work. */
538fe8cd
ILT
2724 if (! added_sets_2
2725 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2726 && ! i2dest_in_i2src)
230d793d
RS
2727 {
2728 regno = REGNO (i2dest);
b1f21e0a 2729 REG_N_SETS (regno)--;
230d793d
RS
2730 }
2731 }
2732
2733 if (i1 && GET_CODE (i1dest) == REG)
2734 {
d0ab8cd3
RK
2735 rtx link;
2736 rtx i1_insn = 0, i1_val = 0, set;
2737
2738 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2739 if ((set = single_set (XEXP (link, 0))) != 0
2740 && rtx_equal_p (i1dest, SET_DEST (set)))
2741 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2742
2743 record_value_for_reg (i1dest, i1_insn, i1_val);
2744
230d793d 2745 regno = REGNO (i1dest);
5af91171 2746 if (! added_sets_1 && ! i1dest_in_i1src)
770ae6cc 2747 REG_N_SETS (regno)--;
230d793d
RS
2748 }
2749
951553af 2750 /* Update reg_nonzero_bits et al for any changes that may have been made
663522cb 2751 to this insn. The order of set_nonzero_bits_and_sign_copies() is
5fb7c247 2752 important. Because newi2pat can affect nonzero_bits of newpat */
22609cbf 2753 if (newi2pat)
84832317 2754 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
5fb7c247 2755 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
22609cbf 2756
44a76fc8
AG
2757 /* Set new_direct_jump_p if a new return or simple jump instruction
2758 has been created.
2759
663522cb 2760 If I3 is now an unconditional jump, ensure that it has a
230d793d 2761 BARRIER following it since it may have initially been a
381ee8af 2762 conditional jump. It may also be the last nonnote insn. */
663522cb 2763
7f1c097d 2764 if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3))
44a76fc8
AG
2765 {
2766 *new_direct_jump_p = 1;
230d793d 2767
44a76fc8
AG
2768 if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2769 || GET_CODE (temp) != BARRIER)
2770 emit_barrier_after (i3);
2771 }
230d793d
RS
2772 }
2773
2774 combine_successes++;
e7749837 2775 undo_commit ();
230d793d 2776
bcd49eb7
JW
2777 /* Clear this here, so that subsequent get_last_value calls are not
2778 affected. */
2779 subst_prev_insn = NULL_RTX;
2780
abe6e52f
RK
2781 if (added_links_insn
2782 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2783 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2784 return added_links_insn;
2785 else
2786 return newi2pat ? i2 : i3;
230d793d
RS
2787}
2788\f
2789/* Undo all the modifications recorded in undobuf. */
2790
2791static void
2792undo_all ()
2793{
241cea85
RK
2794 struct undo *undo, *next;
2795
2796 for (undo = undobuf.undos; undo; undo = next)
7c046e4e 2797 {
241cea85
RK
2798 next = undo->next;
2799 if (undo->is_int)
2800 *undo->where.i = undo->old_contents.i;
7c046e4e 2801 else
241cea85
RK
2802 *undo->where.r = undo->old_contents.r;
2803
2804 undo->next = undobuf.frees;
2805 undobuf.frees = undo;
7c046e4e 2806 }
230d793d 2807
f1c6ba8b 2808 undobuf.undos = 0;
bcd49eb7
JW
2809
2810 /* Clear this here, so that subsequent get_last_value calls are not
2811 affected. */
2812 subst_prev_insn = NULL_RTX;
230d793d 2813}
e7749837
RH
2814
2815/* We've committed to accepting the changes we made. Move all
2816 of the undos to the free list. */
2817
2818static void
2819undo_commit ()
2820{
2821 struct undo *undo, *next;
2822
2823 for (undo = undobuf.undos; undo; undo = next)
2824 {
2825 next = undo->next;
2826 undo->next = undobuf.frees;
2827 undobuf.frees = undo;
2828 }
f1c6ba8b 2829 undobuf.undos = 0;
e7749837
RH
2830}
2831
230d793d
RS
2832\f
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  Returns a pointer into the rtx at the chosen split point,
   or 0 if no split point was found.  This routine may rewrite *LOC in
   place (via SUBST, so the change is undoable) to expose a split.  */

static rtx *
find_split_point (loc, insn)
     rtx *loc;
     rtx insn;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  /* LEN, POS, UNSIGNEDP and INNER describe a bitfield extraction to
     synthesize; they are filled in by the inner switch in the SET case
     below and consumed by the extraction-rewrite code that follows it.  */
  unsigned HOST_WIDE_INT len = 0;
  HOST_WIDE_INT pos = 0;
  int unsignedp = 0;
  rtx inner = NULL_RTX;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
	 point.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM)
	return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
	 using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
	{
	  SUBST (XEXP (x, 0),
		 gen_rtx_LO_SUM (Pmode,
				 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
				 XEXP (x, 0)));
	  return &XEXP (XEXP (x, 0), 0);
	}
#endif

      /* If we have a PLUS whose second operand is a constant and the
	 address is not valid, perhaps we can split it up using
	 the machine-specific way to split large constants.  We use
	 the first pseudo-reg (one of the virtual regs) as a placeholder;
	 it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	{
	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
				 subst_insn);

	  /* This should have produced two insns, each of which sets our
	     placeholder.  If the source of the second is a valid address,
	     we can put both sources together and make a split point
	     in the middle.  */

	  if (seq && XVECLEN (seq, 0) == 2
	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
	      && ! reg_mentioned_p (reg,
				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
	      && memory_address_p (GET_MODE (x),
				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
	    {
	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));

	      /* Replace the placeholder in SRC2 with SRC1.  If we can
		 find where in SRC2 it was placed, that can become our
		 split point and we can replace this address with SRC2.
		 Just try two obvious places.  */

	      src2 = replace_rtx (src2, reg, src1);
	      split = 0;
	      if (XEXP (src2, 0) == src1)
		split = &XEXP (src2, 0);
	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
		       && XEXP (XEXP (src2, 0), 0) == src1)
		split = &XEXP (XEXP (src2, 0), 0);

	      if (split)
		{
		  SUBST (XEXP (x, 0), src2);
		  return split;
		}
	    }

	  /* If that didn't work, perhaps the first operand is complex and
	     needs to be computed separately, so make a split point there.
	     This will occur on machines that just support REG + CONST
	     and have a constant moved through some previous computation.  */

	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
			     == 'o')))
	    return &XEXP (XEXP (x, 0), 0);
	}
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
	 we need to put the operand into a register.  So split at that
	 point.  */

      if (SET_DEST (x) == cc0_rtx
	  && GET_CODE (SET_SRC (x)) != COMPARE
	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
	return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
	return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
	return split;

      /* See if this is a bitfield assignment with everything constant.  If
	 so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
	  && GET_CODE (SET_SRC (x)) == CONST_INT
	  && ((INTVAL (XEXP (SET_DEST (x), 1))
	       + INTVAL (XEXP (SET_DEST (x), 2)))
	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
	{
	  /* NOTE: these POS and LEN deliberately shadow the outer
	     declarations; they describe the constant bitfield store,
	     not a pending extraction.  */
	  HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
	  unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
	  unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
	  rtx dest = XEXP (SET_DEST (x), 0);
	  enum machine_mode mode = GET_MODE (dest);
	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

	  if (BITS_BIG_ENDIAN)
	    pos = GET_MODE_BITSIZE (mode) - len - pos;

	  /* If the stored constant is all ones, the AND is unnecessary;
	     a plain IOR of the shifted constant suffices.  */
	  if (src == mask)
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
	  else
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode,
			       gen_binary (AND, mode, dest,
					   GEN_INT (~(mask << pos)
						    & GET_MODE_MASK (mode))),
			       GEN_INT (src << pos)));

	  SUBST (SET_DEST (x), dest);

	  split = find_split_point (&SET_SRC (x), insn);
	  if (split && split != &SET_SRC (x))
	    return split;
	}

      /* Otherwise, see if this is an operation that we can split into two.
	 If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
	{
	case AND:
	  /* If we are AND'ing with a large constant that is only a single
	     bit and the result is only being used in a context where we
	     need to know if it is zero or non-zero, replace it with a bit
	     extraction.  This will avoid the large constant, which might
	     have taken more than one insn to make.  If the constant were
	     not a valid argument to the AND but took only one insn to make,
	     this is no worse, but if it took more than one insn, it will
	     be better.  */

	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
	      && GET_CODE (SET_DEST (x)) == REG
	      && (split = find_single_use (SET_DEST (x), insn, (rtx*)0)) != 0
	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
	      && XEXP (*split, 0) == SET_DEST (x)
	      && XEXP (*split, 1) == const0_rtx)
	    {
	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
						XEXP (SET_SRC (x), 0),
						pos, NULL_RTX, 1, 1, 0, 0);
	      if (extraction != 0)
		{
		  SUBST (SET_SRC (x), extraction);
		  return find_split_point (loc, insn);
		}
	    }
	  break;

	case NE:
	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
	     is known to be on, this can be converted into a NEG of a shift.  */
	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
	      && 1 <= (pos = exact_log2
		       (nonzero_bits (XEXP (SET_SRC (x), 0),
				      GET_MODE (XEXP (SET_SRC (x), 0))))))
	    {
	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

	      SUBST (SET_SRC (x),
		     gen_rtx_NEG (mode,
				  gen_rtx_LSHIFTRT (mode,
						    XEXP (SET_SRC (x), 0),
						    GEN_INT (pos))));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  break;

	case SIGN_EXTEND:
	  inner = XEXP (SET_SRC (x), 0);

	  /* We can't optimize if either mode is a partial integer
	     mode as we don't know how many bits are significant
	     in those modes.  */
	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
	    break;

	  /* Treat the extension as an extraction of the low bits.  */
	  pos = 0;
	  len = GET_MODE_BITSIZE (GET_MODE (inner));
	  unsignedp = 0;
	  break;

	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
	    {
	      inner = XEXP (SET_SRC (x), 0);
	      len = INTVAL (XEXP (SET_SRC (x), 1));
	      pos = INTVAL (XEXP (SET_SRC (x), 2));

	      if (BITS_BIG_ENDIAN)
		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
	      unsignedp = (code == ZERO_EXTRACT);
	    }
	  break;

	default:
	  break;
	}

      /* If the inner switch above described an extraction (LEN nonzero),
	 rewrite it as explicit shifts and try to split the result.  */
      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
	{
	  enum machine_mode mode = GET_MODE (SET_SRC (x));

	  /* For unsigned, we have a choice of a shift followed by an
	     AND or two shifts.  Use two shifts for field sizes where the
	     constant might be too large.  We assume here that we can
	     always at least get 8-bit constants in an AND insn, which is
	     true for every current RISC.  */

	  if (unsignedp && len <= 8)
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_AND (mode,
				  gen_rtx_LSHIFTRT
				  (mode, gen_lowpart_for_combine (mode, inner),
				   GEN_INT (pos)),
				  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  else
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_fmt_ee
		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
		      gen_rtx_ASHIFT (mode,
				      gen_lowpart_for_combine (mode, inner),
				      GEN_INT (GET_MODE_BITSIZE (mode)
					       - len - pos)),
		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	}

      /* See if this is a simple operation with a constant as the second
	 operand.  It might be that this constant is out of range and hence
	 could be used as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
		      == 'o'))))
	return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
	 not in a register.  The operation might require this operand in a
	 register, so return it as a split point.  We can always do this
	 because if the first operand were another operation, we would have
	 already found it as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
	return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
	 it is better to write this as (not (ior A B)) so we can split it.
	 Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
	{
	  SUBST (*loc,
		 gen_rtx_NOT (GET_MODE (x),
			      gen_rtx_fmt_ee (code == IOR ? AND : IOR,
					      GET_MODE (x),
					      XEXP (XEXP (x, 0), 0),
					      XEXP (XEXP (x, 1), 0))));
	  return find_split_point (loc, insn);
	}

      /* Many RISC machines have a large set of logical insns.  If the
	 second operand is a NOT, put it first so we will try to split the
	 other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
	{
	  rtx tem = XEXP (x, 0);
	  SUBST (XEXP (x, 0), XEXP (x, 1));
	  SUBST (XEXP (x, 1), tem);
	}
      break;

    default:
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case '3':
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '2':
    case 'c':
    case '<':
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '1':
      /* Some machines have (and (shift ...) ...) insns.  If X is not
	 an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
	return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
	return split;
      return loc;
    }

  /* Otherwise, we don't have a split point.  */
  return 0;
}
3235\f
3236/* Throughout X, replace FROM with TO, and return the result.
3237 The result is TO if X is FROM;
3238 otherwise the result is X, but its contents may have been modified.
3239 If they were modified, a record was made in undobuf so that
3240 undo_all will (among other things) return X to its original state.
3241
3242 If the number of changes necessary is too much to record to undo,
3243 the excess changes are not made, so the result is invalid.
3244 The changes already made can still be undone.
3245 undobuf.num_undo is incremented for such changes, so by testing that
3246 the caller can tell whether the result is valid.
3247
3248 `n_occurrences' is incremented each time FROM is replaced.
663522cb 3249
230d793d
RS
3250 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3251
5089e22e 3252 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
230d793d
RS
3253 by copying if `n_occurrences' is non-zero. */
3254
static rtx
subst (x, from, to, in_dest, unique_copy)
     register rtx x, from, to;
     int in_dest;
     int unique_copy;
{
  register enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  register const char *fmt;
  register int len, i;
  rtx new;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)			\
  ((X) == (Y)						\
   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  /* Whole-expression match: replace X itself with TO (copying if a
     unique copy is required and this is not the first occurrence).  */
  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they will
     not have been seen as equal above.  However, flow.c will make a
     LOG_LINKS entry for that case.  If we do nothing, we will try to
     rerecognize our original insn and, when it succeeds, we will
     delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && GET_CODE (from) == REG
      && REGNO (x) == REGNO (from))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

  /* Parallel asm_operands need special attention because all of the
     inputs are shared across the arms.  Furthermore, unsharing the
     rtl results in recognition failures.  Failure to handle this case
     specially can result in circular rtl.

     Solve this by doing a normal pass across the first entry of the
     parallel, and only processing the SET_DESTs of the subsequent
     entries.  Ug.  */

  if (code == PARALLEL
      && GET_CODE (XVECEXP (x, 0, 0)) == SET
      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
    {
      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);

      /* If this substitution failed, this whole thing fails.
	 (A (clobber (const_int 0)) is the canonical "reject this
	 combination" marker throughout this pass.)  */
      if (GET_CODE (new) == CLOBBER
	  && XEXP (new, 0) == const0_rtx)
	return new;

      SUBST (XVECEXP (x, 0, 0), new);

      /* Only the SET_DESTs of the remaining arms are processed; their
	 SET_SRCs share rtl with the first arm and must not be walked.  */
      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
	{
	  rtx dest = SET_DEST (XVECEXP (x, 0, i));

	  if (GET_CODE (dest) != REG
	      && GET_CODE (dest) != CC0
	      && GET_CODE (dest) != PC)
	    {
	      new = subst (dest, from, to, 0, unique_copy);

	      /* If this substitution failed, this whole thing fails.  */
	      if (GET_CODE (new) == CLOBBER
		  && XEXP (new, 0) == const0_rtx)
		return new;

	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
	    }
	}
    }
  else
    {
      len = GET_RTX_LENGTH (code);
      fmt = GET_RTX_FORMAT (code);

      /* We don't need to process a SET_DEST that is a register, CC0,
	 or PC, so set up to skip this common case.  All other cases
	 where we want to suppress replacing something inside a
	 SET_SRC are handled via the IN_DEST operand.  */
      if (code == SET
	  && (GET_CODE (SET_DEST (x)) == REG
	      || GET_CODE (SET_DEST (x)) == CC0
	      || GET_CODE (SET_DEST (x)) == PC))
	fmt = "ie";	/* 'i' makes the loop skip operand 0 entirely.  */

      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
	 constant.  */
      if (fmt[0] == 'e')
	op0_mode = GET_MODE (XEXP (x, 0));

      for (i = 0; i < len; i++)
	{
	  if (fmt[i] == 'E')
	    {
	      /* Operand i is a vector of expressions; substitute into
		 each element.  */
	      register int j;
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		{
		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
		    {
		      new = (unique_copy && n_occurrences
			     ? copy_rtx (to) : to);
		      n_occurrences++;
		    }
		  else
		    {
		      new = subst (XVECEXP (x, i, j), from, to, 0,
				   unique_copy);

		      /* If this substitution failed, this whole thing
			 fails.  */
		      if (GET_CODE (new) == CLOBBER
			  && XEXP (new, 0) == const0_rtx)
			return new;
		    }

		  SUBST (XVECEXP (x, i, j), new);
		}
	    }
	  else if (fmt[i] == 'e')
	    {
	      /* If this is a register being set, ignore it.  */
	      new = XEXP (x, i);
	      if (in_dest
		  && (code == SUBREG || code == STRICT_LOW_PART
		      || code == ZERO_EXTRACT)
		  && i == 0
		  && GET_CODE (new) == REG)
		;

	      else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
		{
		  /* In general, don't install a subreg involving two
		     modes not tieable.  It can worsen register
		     allocation, and can even make invalid reload
		     insns, since the reg inside may need to be copied
		     from in the outside mode, and that may be invalid
		     if it is an fp reg copied in integer mode.

		     We allow two exceptions to this: It is valid if
		     it is inside another SUBREG and the mode of that
		     SUBREG and the mode of the inside of TO is
		     tieable and it is valid if X is a SET that copies
		     FROM to CC0.  */

		  if (GET_CODE (to) == SUBREG
		      && ! MODES_TIEABLE_P (GET_MODE (to),
					    GET_MODE (SUBREG_REG (to)))
		      && ! (code == SUBREG
			    && MODES_TIEABLE_P (GET_MODE (x),
						GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
		      )
		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);

#ifdef CLASS_CANNOT_CHANGE_MODE
		  /* Likewise reject substituting a hard reg whose class
		     cannot change mode into a context of mode MODE.  */
		  if (code == SUBREG
		      && GET_CODE (to) == REG
		      && REGNO (to) < FIRST_PSEUDO_REGISTER
		      && (TEST_HARD_REG_BIT
			  (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
			   REGNO (to)))
		      && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (to),
						     GET_MODE (x)))
		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
#endif

		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
		  n_occurrences++;
		}
	      else
		/* If we are in a SET_DEST, suppress most cases unless we
		   have gone inside a MEM, in which case we want to
		   simplify the address.  We assume here that things that
		   are actually part of the destination have their inner
		   parts in the first expression.  This is true for SUBREG,
		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
		   things aside from REG and MEM that should appear in a
		   SET_DEST.  */
		new = subst (XEXP (x, i), from, to,
			     (((in_dest
				&& (code == SUBREG || code == STRICT_LOW_PART
				    || code == ZERO_EXTRACT))
			       || code == SET)
			      && i == 0), unique_copy);

	      /* If we found that we will have to reject this combination,
		 indicate that by returning the CLOBBER ourselves, rather than
		 an expression containing it.  This will speed things up as
		 well as prevent accidents where two CLOBBERs are considered
		 to be equal, thus producing an incorrect simplification.  */

	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
		return new;

	      SUBST (XEXP (x, i), new);
	    }
	}
    }

  /* Try to simplify X.  If the simplification changed the code, it is likely
     that further simplification will help, so loop, but limit the number
     of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
	 with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
	x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);

      if (GET_CODE (x) == code)
	break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
	 have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}
3502\f
3503/* Simplify X, a piece of RTL. We just operate on the expression at the
3504 outer level; call `subst' to simplify recursively. Return the new
3505 expression.
3506
3507 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3508 will be the iteration even if an expression with a code different from
3509 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 3510
8079805d 3511static rtx
31ec4e5e 3512combine_simplify_rtx (x, op0_mode, last, in_dest)
8079805d
RK
3513 rtx x;
3514 enum machine_mode op0_mode;
3515 int last;
3516 int in_dest;
3517{
3518 enum rtx_code code = GET_CODE (x);
3519 enum machine_mode mode = GET_MODE (x);
3520 rtx temp;
9a915772 3521 rtx reversed;
8079805d 3522 int i;
d0ab8cd3 3523
230d793d
RS
3524 /* If this is a commutative operation, put a constant last and a complex
3525 expression first. We don't need to do this for comparisons here. */
3526 if (GET_RTX_CLASS (code) == 'c'
e5c56fd9 3527 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
230d793d
RS
3528 {
3529 temp = XEXP (x, 0);
3530 SUBST (XEXP (x, 0), XEXP (x, 1));
3531 SUBST (XEXP (x, 1), temp);
3532 }
3533
22609cbf
RK
3534 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3535 sign extension of a PLUS with a constant, reverse the order of the sign
3536 extension and the addition. Note that this not the same as the original
3537 code, but overflow is undefined for signed values. Also note that the
3538 PLUS will have been partially moved "inside" the sign-extension, so that
3539 the first operand of X will really look like:
3540 (ashiftrt (plus (ashift A C4) C5) C4).
3541 We convert this to
3542 (plus (ashiftrt (ashift A C4) C2) C4)
3543 and replace the first operand of X with that expression. Later parts
3544 of this function may simplify the expression further.
3545
3546 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3547 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3548 distributive law to produce (plus (mult (sign_extend X) C1) C3).
3549
3550 We do this to simplify address expressions. */
3551
3552 if ((code == PLUS || code == MINUS || code == MULT)
3553 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3554 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3555 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3556 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3557 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3558 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3559 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3560 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3561 XEXP (XEXP (XEXP (x, 0), 0), 1),
3562 XEXP (XEXP (x, 0), 1))) != 0)
3563 {
3564 rtx new
3565 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3566 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3567 INTVAL (XEXP (XEXP (x, 0), 1)));
3568
3569 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3570 INTVAL (XEXP (XEXP (x, 0), 1)));
3571
3572 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3573 }
3574
663522cb 3575 /* If this is a simple operation applied to an IF_THEN_ELSE, try
d0ab8cd3 3576 applying it to the arms of the IF_THEN_ELSE. This often simplifies
abe6e52f
RK
3577 things. Check for cases where both arms are testing the same
3578 condition.
3579
3580 Don't do anything if all operands are very simple. */
3581
3582 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3583 || GET_RTX_CLASS (code) == '<')
3584 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3585 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3586 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3587 == 'o')))
3588 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3589 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3590 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3591 == 'o')))))
3592 || (GET_RTX_CLASS (code) == '1'
3593 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3594 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3595 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3596 == 'o'))))))
d0ab8cd3 3597 {
d6edb99e 3598 rtx cond, true_rtx, false_rtx;
abe6e52f 3599
d6edb99e 3600 cond = if_then_else_cond (x, &true_rtx, &false_rtx);
0802d516
RK
3601 if (cond != 0
3602 /* If everything is a comparison, what we have is highly unlikely
3603 to be simpler, so don't use it. */
3604 && ! (GET_RTX_CLASS (code) == '<'
d6edb99e
ZW
3605 && (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<'
3606 || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<')))
abe6e52f
RK
3607 {
3608 rtx cop1 = const0_rtx;
3609 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3610
15448afc
RK
3611 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3612 return x;
3613
663522cb 3614 /* Simplify the alternative arms; this may collapse the true and
9210df58 3615 false arms to store-flag values. */
d6edb99e
ZW
3616 true_rtx = subst (true_rtx, pc_rtx, pc_rtx, 0, 0);
3617 false_rtx = subst (false_rtx, pc_rtx, pc_rtx, 0, 0);
9210df58 3618
d6edb99e 3619 /* If true_rtx and false_rtx are not general_operands, an if_then_else
085f1714 3620 is unlikely to be simpler. */
d6edb99e
ZW
3621 if (general_operand (true_rtx, VOIDmode)
3622 && general_operand (false_rtx, VOIDmode))
085f1714
RH
3623 {
3624 /* Restarting if we generate a store-flag expression will cause
3625 us to loop. Just drop through in this case. */
3626
3627 /* If the result values are STORE_FLAG_VALUE and zero, we can
3628 just make the comparison operation. */
d6edb99e 3629 if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
085f1714 3630 x = gen_binary (cond_code, mode, cond, cop1);
d6edb99e 3631 else if (true_rtx == const0_rtx && false_rtx == const_true_rtx)
085f1714
RH
3632 x = gen_binary (reverse_condition (cond_code),
3633 mode, cond, cop1);
3634
3635 /* Likewise, we can make the negate of a comparison operation
3636 if the result values are - STORE_FLAG_VALUE and zero. */
d6edb99e
ZW
3637 else if (GET_CODE (true_rtx) == CONST_INT
3638 && INTVAL (true_rtx) == - STORE_FLAG_VALUE
3639 && false_rtx == const0_rtx)
f1c6ba8b
RK
3640 x = simplify_gen_unary (NEG, mode,
3641 gen_binary (cond_code, mode, cond,
3642 cop1),
3643 mode);
d6edb99e
ZW
3644 else if (GET_CODE (false_rtx) == CONST_INT
3645 && INTVAL (false_rtx) == - STORE_FLAG_VALUE
3646 && true_rtx == const0_rtx)
f1c6ba8b
RK
3647 x = simplify_gen_unary (NEG, mode,
3648 gen_binary (reverse_condition
3649 (cond_code),
3650 mode, cond, cop1),
3651 mode);
085f1714
RH
3652 else
3653 return gen_rtx_IF_THEN_ELSE (mode,
3654 gen_binary (cond_code, VOIDmode,
3655 cond, cop1),
d6edb99e 3656 true_rtx, false_rtx);
5109d49f 3657
085f1714
RH
3658 code = GET_CODE (x);
3659 op0_mode = VOIDmode;
3660 }
abe6e52f 3661 }
d0ab8cd3
RK
3662 }
3663
230d793d
RS
3664 /* Try to fold this expression in case we have constants that weren't
3665 present before. */
3666 temp = 0;
3667 switch (GET_RTX_CLASS (code))
3668 {
3669 case '1':
3670 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3671 break;
3672 case '<':
47b1e19b
JH
3673 {
3674 enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
3675 if (cmp_mode == VOIDmode)
1cac8785
DD
3676 {
3677 cmp_mode = GET_MODE (XEXP (x, 1));
3678 if (cmp_mode == VOIDmode)
3679 cmp_mode = op0_mode;
3680 }
47b1e19b
JH
3681 temp = simplify_relational_operation (code, cmp_mode,
3682 XEXP (x, 0), XEXP (x, 1));
3683 }
77fa0940 3684#ifdef FLOAT_STORE_FLAG_VALUE
12530dbe
RH
3685 if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3686 {
3687 if (temp == const0_rtx)
3688 temp = CONST0_RTX (mode);
3689 else
3690 temp = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE (mode), mode);
3691 }
77fa0940 3692#endif
230d793d
RS
3693 break;
3694 case 'c':
3695 case '2':
3696 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3697 break;
3698 case 'b':
3699 case '3':
3700 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3701 XEXP (x, 1), XEXP (x, 2));
3702 break;
3703 }
3704
3705 if (temp)
4531c1c7
DN
3706 {
3707 x = temp;
3708 code = GET_CODE (temp);
3709 op0_mode = VOIDmode;
3710 mode = GET_MODE (temp);
3711 }
230d793d 3712
230d793d 3713 /* First see if we can apply the inverse distributive law. */
224eeff2
RK
3714 if (code == PLUS || code == MINUS
3715 || code == AND || code == IOR || code == XOR)
230d793d
RS
3716 {
3717 x = apply_distributive_law (x);
3718 code = GET_CODE (x);
6e20204f 3719 op0_mode = VOIDmode;
230d793d
RS
3720 }
3721
3722 /* If CODE is an associative operation not otherwise handled, see if we
3723 can associate some operands. This can win if they are constants or
e0e08ac2 3724 if they are logically related (i.e. (a & b) & a). */
230d793d
RS
3725 if ((code == PLUS || code == MINUS
3726 || code == MULT || code == AND || code == IOR || code == XOR
3727 || code == DIV || code == UDIV
3728 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3ad2180a 3729 && INTEGRAL_MODE_P (mode))
230d793d
RS
3730 {
3731 if (GET_CODE (XEXP (x, 0)) == code)
3732 {
3733 rtx other = XEXP (XEXP (x, 0), 0);
3734 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3735 rtx inner_op1 = XEXP (x, 1);
3736 rtx inner;
663522cb 3737
230d793d
RS
3738 /* Make sure we pass the constant operand if any as the second
3739 one if this is a commutative operation. */
3740 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3741 {
3742 rtx tem = inner_op0;
3743 inner_op0 = inner_op1;
3744 inner_op1 = tem;
3745 }
3746 inner = simplify_binary_operation (code == MINUS ? PLUS
3747 : code == DIV ? MULT
3748 : code == UDIV ? MULT
3749 : code,
3750 mode, inner_op0, inner_op1);
3751
3752 /* For commutative operations, try the other pair if that one
3753 didn't simplify. */
3754 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3755 {
3756 other = XEXP (XEXP (x, 0), 1);
3757 inner = simplify_binary_operation (code, mode,
3758 XEXP (XEXP (x, 0), 0),
3759 XEXP (x, 1));
3760 }
3761
3762 if (inner)
8079805d 3763 return gen_binary (code, mode, other, inner);
230d793d
RS
3764 }
3765 }
3766
3767 /* A little bit of algebraic simplification here. */
3768 switch (code)
3769 {
3770 case MEM:
3771 /* Ensure that our address has any ASHIFTs converted to MULT in case
3772 address-recognizing predicates are called later. */
3773 temp = make_compound_operation (XEXP (x, 0), MEM);
3774 SUBST (XEXP (x, 0), temp);
3775 break;
3776
3777 case SUBREG:
eea50aa0
JH
3778 if (op0_mode == VOIDmode)
3779 op0_mode = GET_MODE (SUBREG_REG (x));
230d793d 3780
eea50aa0 3781 /* simplify_subreg can't use gen_lowpart_for_combine. */
3c99d5ff 3782 if (CONSTANT_P (SUBREG_REG (x))
e0e08ac2 3783 && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x))
230d793d
RS
3784 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3785
eea50aa0
JH
3786 {
3787 rtx temp;
3788 temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
3789 SUBREG_BYTE (x));
3790 if (temp)
3791 return temp;
3792 }
b65c1b5b 3793
87e3e0c1
RK
3794 /* Note that we cannot do any narrowing for non-constants since
3795 we might have been counting on using the fact that some bits were
3796 zero. We now do this in the SET. */
3797
230d793d
RS
3798 break;
3799
3800 case NOT:
3801 /* (not (plus X -1)) can become (neg X). */
3802 if (GET_CODE (XEXP (x, 0)) == PLUS
3803 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
f1c6ba8b 3804 return gen_rtx_NEG (mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3805
3806 /* Similarly, (not (neg X)) is (plus X -1). */
3807 if (GET_CODE (XEXP (x, 0)) == NEG)
f1c6ba8b 3808 return gen_rtx_PLUS (mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
230d793d 3809
663522cb 3810 /* (not (xor X C)) for C constant is (xor X D) with D = ~C. */
d0ab8cd3
RK
3811 if (GET_CODE (XEXP (x, 0)) == XOR
3812 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3813 && (temp = simplify_unary_operation (NOT, mode,
3814 XEXP (XEXP (x, 0), 1),
3815 mode)) != 0)
787745f5 3816 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
663522cb 3817
230d793d
RS
3818 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3819 other than 1, but that is not valid. We could do a similar
3820 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3821 but this doesn't seem common enough to bother with. */
3822 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3823 && XEXP (XEXP (x, 0), 0) == const1_rtx)
f1c6ba8b
RK
3824 return gen_rtx_ROTATE (mode, simplify_gen_unary (NOT, mode,
3825 const1_rtx, mode),
38a448ca 3826 XEXP (XEXP (x, 0), 1));
663522cb 3827
230d793d
RS
3828 if (GET_CODE (XEXP (x, 0)) == SUBREG
3829 && subreg_lowpart_p (XEXP (x, 0))
3830 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3831 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3832 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3833 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3834 {
3835 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3836
38a448ca 3837 x = gen_rtx_ROTATE (inner_mode,
f1c6ba8b
RK
3838 simplify_gen_unary (NOT, inner_mode, const1_rtx,
3839 inner_mode),
38a448ca 3840 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
8079805d 3841 return gen_lowpart_for_combine (mode, x);
230d793d 3842 }
663522cb 3843
0802d516
RK
3844 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3845 reversing the comparison code if valid. */
3846 if (STORE_FLAG_VALUE == -1
3847 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
9a915772
JH
3848 && (reversed = reversed_comparison (x, mode, XEXP (XEXP (x, 0), 0),
3849 XEXP (XEXP (x, 0), 1))))
3850 return reversed;
500c518b
RK
3851
3852 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
0802d516
RK
3853 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3854 perform the above simplification. */
500c518b 3855
0802d516 3856 if (STORE_FLAG_VALUE == -1
500c518b 3857 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
37ac53d9 3858 && XEXP (x, 1) == const1_rtx
500c518b
RK
3859 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3860 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
f1c6ba8b 3861 return gen_rtx_GE (mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
3862
3863 /* Apply De Morgan's laws to reduce number of patterns for machines
3864 with negating logical insns (and-not, nand, etc.). If result has
3865 only one NOT, put it first, since that is how the patterns are
3866 coded. */
3867
3868 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3869 {
663522cb 3870 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
5bd60ce6 3871 enum machine_mode op_mode;
230d793d 3872
5bd60ce6 3873 op_mode = GET_MODE (in1);
f1c6ba8b 3874 in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
230d793d 3875
5bd60ce6
RH
3876 op_mode = GET_MODE (in2);
3877 if (op_mode == VOIDmode)
3878 op_mode = mode;
f1c6ba8b 3879 in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
663522cb 3880
5bd60ce6 3881 if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
663522cb
KH
3882 {
3883 rtx tem = in2;
3884 in2 = in1; in1 = tem;
3885 }
3886
f1c6ba8b
RK
3887 return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3888 mode, in1, in2);
663522cb 3889 }
230d793d
RS
3890 break;
3891
3892 case NEG:
3893 /* (neg (plus X 1)) can become (not X). */
3894 if (GET_CODE (XEXP (x, 0)) == PLUS
3895 && XEXP (XEXP (x, 0), 1) == const1_rtx)
f1c6ba8b 3896 return gen_rtx_NOT (mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3897
3898 /* Similarly, (neg (not X)) is (plus X 1). */
3899 if (GET_CODE (XEXP (x, 0)) == NOT)
8079805d 3900 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
230d793d 3901
230d793d
RS
3902 /* (neg (minus X Y)) can become (minus Y X). */
3903 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3904 && (! FLOAT_MODE_P (mode)
0f41302f 3905 /* x-y != -(y-x) with IEEE floating point. */
7e2a0d8e 3906 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
de6c5979 3907 || flag_unsafe_math_optimizations))
8079805d
RK
3908 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3909 XEXP (XEXP (x, 0), 0));
230d793d 3910
0f41302f 3911 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
d0ab8cd3 3912 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3913 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
8079805d 3914 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
d0ab8cd3 3915
230d793d
RS
3916 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3917 if we can then eliminate the NEG (e.g.,
3918 if the operand is a constant). */
3919
3920 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3921 {
3922 temp = simplify_unary_operation (NEG, mode,
3923 XEXP (XEXP (x, 0), 0), mode);
3924 if (temp)
3925 {
3926 SUBST (XEXP (XEXP (x, 0), 0), temp);
3927 return XEXP (x, 0);
3928 }
3929 }
3930
3931 temp = expand_compound_operation (XEXP (x, 0));
3932
3933 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3934 replaced by (lshiftrt X C). This will convert
3935 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3936
3937 if (GET_CODE (temp) == ASHIFTRT
3938 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3939 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
8079805d
RK
3940 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3941 INTVAL (XEXP (temp, 1)));
230d793d 3942
951553af 3943 /* If X has only a single bit that might be nonzero, say, bit I, convert
230d793d
RS
3944 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3945 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3946 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3947 or a SUBREG of one since we'd be making the expression more
3948 complex if it was just a register. */
3949
3950 if (GET_CODE (temp) != REG
3951 && ! (GET_CODE (temp) == SUBREG
3952 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3953 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
230d793d
RS
3954 {
3955 rtx temp1 = simplify_shift_const
5f4f0e22
CH
3956 (NULL_RTX, ASHIFTRT, mode,
3957 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
230d793d
RS
3958 GET_MODE_BITSIZE (mode) - 1 - i),
3959 GET_MODE_BITSIZE (mode) - 1 - i);
3960
3961 /* If all we did was surround TEMP with the two shifts, we
3962 haven't improved anything, so don't use it. Otherwise,
3963 we are better off with TEMP1. */
3964 if (GET_CODE (temp1) != ASHIFTRT
3965 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3966 || XEXP (XEXP (temp1, 0), 0) != temp)
8079805d 3967 return temp1;
230d793d
RS
3968 }
3969 break;
3970
2ca9ae17 3971 case TRUNCATE:
e30fb98f
JL
3972 /* We can't handle truncation to a partial integer mode here
3973 because we don't know the real bitsize of the partial
3974 integer mode. */
3975 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3976 break;
3977
80608e27
JL
3978 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3979 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3980 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
2ca9ae17
JW
3981 SUBST (XEXP (x, 0),
3982 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3983 GET_MODE_MASK (mode), NULL_RTX, 0));
0f13a422
ILT
3984
3985 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3986 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3987 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3988 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3989 return XEXP (XEXP (x, 0), 0);
3990
3991 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3992 (OP:SI foo:SI) if OP is NEG or ABS. */
3993 if ((GET_CODE (XEXP (x, 0)) == ABS
3994 || GET_CODE (XEXP (x, 0)) == NEG)
3995 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3996 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3997 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
f1c6ba8b
RK
3998 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
3999 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
0f13a422
ILT
4000
4001 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
4002 (truncate:SI x). */
4003 if (GET_CODE (XEXP (x, 0)) == SUBREG
4004 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
4005 && subreg_lowpart_p (XEXP (x, 0)))
4006 return SUBREG_REG (XEXP (x, 0));
4007
4008 /* If we know that the value is already truncated, we can
14a774a9
RK
4009 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
4010 is nonzero for the corresponding modes. But don't do this
4011 for an (LSHIFTRT (MULT ...)) since this will cause problems
4012 with the umulXi3_highpart patterns. */
6a992214
JL
4013 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4014 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4015 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
14a774a9
RK
4016 >= GET_MODE_BITSIZE (mode) + 1
4017 && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4018 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
0f13a422
ILT
4019 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4020
4021 /* A truncate of a comparison can be replaced with a subreg if
4022 STORE_FLAG_VALUE permits. This is like the previous test,
4023 but it works even if the comparison is done in a mode larger
4024 than HOST_BITS_PER_WIDE_INT. */
4025 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4026 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
663522cb 4027 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
0f13a422
ILT
4028 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4029
4030 /* Similarly, a truncate of a register whose value is a
4031 comparison can be replaced with a subreg if STORE_FLAG_VALUE
4032 permits. */
4033 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
663522cb 4034 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
0f13a422
ILT
4035 && (temp = get_last_value (XEXP (x, 0)))
4036 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4037 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4038
2ca9ae17
JW
4039 break;
4040
230d793d
RS
4041 case FLOAT_TRUNCATE:
4042 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
4043 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4044 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
663522cb 4045 return XEXP (XEXP (x, 0), 0);
4635f748
RK
4046
4047 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
4048 (OP:SF foo:SF) if OP is NEG or ABS. */
4049 if ((GET_CODE (XEXP (x, 0)) == ABS
4050 || GET_CODE (XEXP (x, 0)) == NEG)
4051 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4052 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
f1c6ba8b
RK
4053 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4054 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
1d12df72
RK
4055
4056 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4057 is (float_truncate:SF x). */
4058 if (GET_CODE (XEXP (x, 0)) == SUBREG
4059 && subreg_lowpart_p (XEXP (x, 0))
4060 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4061 return SUBREG_REG (XEXP (x, 0));
663522cb 4062 break;
230d793d
RS
4063
4064#ifdef HAVE_cc0
4065 case COMPARE:
4066 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
4067 using cc0, in which case we want to leave it as a COMPARE
4068 so we can distinguish it from a register-register-copy. */
4069 if (XEXP (x, 1) == const0_rtx)
4070 return XEXP (x, 0);
4071
4072 /* In IEEE floating point, x-0 is not the same as x. */
4073 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e 4074 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
de6c5979 4075 || flag_unsafe_math_optimizations)
230d793d
RS
4076 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4077 return XEXP (x, 0);
4078 break;
4079#endif
4080
4081 case CONST:
4082 /* (const (const X)) can become (const X). Do it this way rather than
4083 returning the inner CONST since CONST can be shared with a
4084 REG_EQUAL note. */
4085 if (GET_CODE (XEXP (x, 0)) == CONST)
4086 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4087 break;
4088
4089#ifdef HAVE_lo_sum
4090 case LO_SUM:
4091 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
4092 can add in an offset. find_split_point will split this address up
4093 again if it doesn't match. */
4094 if (GET_CODE (XEXP (x, 0)) == HIGH
4095 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4096 return XEXP (x, 1);
4097 break;
4098#endif
4099
4100 case PLUS:
4101 /* If we have (plus (plus (A const) B)), associate it so that CONST is
4102 outermost. That's because that's the way indexed addresses are
4103 supposed to appear. This code used to check many more cases, but
4104 they are now checked elsewhere. */
4105 if (GET_CODE (XEXP (x, 0)) == PLUS
4106 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4107 return gen_binary (PLUS, mode,
4108 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4109 XEXP (x, 1)),
4110 XEXP (XEXP (x, 0), 1));
4111
4112 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4113 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4114 bit-field and can be replaced by either a sign_extend or a
e6380233
JL
4115 sign_extract. The `and' may be a zero_extend and the two
4116 <c>, -<c> constants may be reversed. */
230d793d
RS
4117 if (GET_CODE (XEXP (x, 0)) == XOR
4118 && GET_CODE (XEXP (x, 1)) == CONST_INT
4119 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
663522cb 4120 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
e6380233
JL
4121 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4122 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5f4f0e22 4123 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
4124 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4125 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4126 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 4127 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
230d793d
RS
4128 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4129 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
770ae6cc 4130 == (unsigned int) i + 1))))
8079805d
RK
4131 return simplify_shift_const
4132 (NULL_RTX, ASHIFTRT, mode,
4133 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4134 XEXP (XEXP (XEXP (x, 0), 0), 0),
4135 GET_MODE_BITSIZE (mode) - (i + 1)),
4136 GET_MODE_BITSIZE (mode) - (i + 1));
230d793d 4137
bc0776c6
RK
4138 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4139 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4140 is 1. This produces better code than the alternative immediately
4141 below. */
4142 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
bc0776c6 4143 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
9a915772
JH
4144 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
4145 && (reversed = reversed_comparison (XEXP (x, 0), mode,
4146 XEXP (XEXP (x, 0), 0),
4147 XEXP (XEXP (x, 0), 1))))
8079805d 4148 return
f1c6ba8b 4149 simplify_gen_unary (NEG, mode, reversed, mode);
bc0776c6
RK
4150
4151 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
230d793d
RS
4152 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4153 the bitsize of the mode - 1. This allows simplification of
4154 "a = (b & 8) == 0;" */
4155 if (XEXP (x, 1) == constm1_rtx
4156 && GET_CODE (XEXP (x, 0)) != REG
4157 && ! (GET_CODE (XEXP (x,0)) == SUBREG
4158 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 4159 && nonzero_bits (XEXP (x, 0), mode) == 1)
8079805d
RK
4160 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4161 simplify_shift_const (NULL_RTX, ASHIFT, mode,
f1c6ba8b 4162 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
8079805d
RK
4163 GET_MODE_BITSIZE (mode) - 1),
4164 GET_MODE_BITSIZE (mode) - 1);
02f4ada4
RK
4165
4166 /* If we are adding two things that have no bits in common, convert
4167 the addition into an IOR. This will often be further simplified,
4168 for example in cases like ((a & 1) + (a & 2)), which can
4169 become a & 3. */
4170
ac49a949 4171 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af
RK
4172 && (nonzero_bits (XEXP (x, 0), mode)
4173 & nonzero_bits (XEXP (x, 1), mode)) == 0)
085f1714
RH
4174 {
4175 /* Try to simplify the expression further. */
4176 rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4177 temp = combine_simplify_rtx (tor, mode, last, in_dest);
4178
4179 /* If we could, great. If not, do not go ahead with the IOR
4180 replacement, since PLUS appears in many special purpose
4181 address arithmetic instructions. */
4182 if (GET_CODE (temp) != CLOBBER && temp != tor)
4183 return temp;
4184 }
230d793d
RS
4185 break;
4186
4187 case MINUS:
0802d516
RK
4188 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4189 by reversing the comparison code if valid. */
4190 if (STORE_FLAG_VALUE == 1
4191 && XEXP (x, 0) == const1_rtx
5109d49f 4192 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
9a915772
JH
4193 && (reversed = reversed_comparison (XEXP (x, 1), mode,
4194 XEXP (XEXP (x, 1), 0),
4195 XEXP (XEXP (x, 1), 1))))
4196 return reversed;
5109d49f 4197
230d793d
RS
4198 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4199 (and <foo> (const_int pow2-1)) */
4200 if (GET_CODE (XEXP (x, 1)) == AND
4201 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
663522cb 4202 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
230d793d 4203 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
8079805d 4204 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
663522cb 4205 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
7bef8680
RK
4206
4207 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4208 integers. */
4209 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
8079805d
RK
4210 return gen_binary (MINUS, mode,
4211 gen_binary (MINUS, mode, XEXP (x, 0),
4212 XEXP (XEXP (x, 1), 0)),
4213 XEXP (XEXP (x, 1), 1));
230d793d
RS
4214 break;
4215
4216 case MULT:
4217 /* If we have (mult (plus A B) C), apply the distributive law and then
4218 the inverse distributive law to see if things simplify. This
4219 occurs mostly in addresses, often when unrolling loops. */
4220
4221 if (GET_CODE (XEXP (x, 0)) == PLUS)
4222 {
4223 x = apply_distributive_law
4224 (gen_binary (PLUS, mode,
4225 gen_binary (MULT, mode,
4226 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4227 gen_binary (MULT, mode,
3749f4ca
BS
4228 XEXP (XEXP (x, 0), 1),
4229 copy_rtx (XEXP (x, 1)))));
230d793d
RS
4230
4231 if (GET_CODE (x) != MULT)
8079805d 4232 return x;
230d793d 4233 }
230d793d
RS
4234 break;
4235
4236 case UDIV:
4237 /* If this is a divide by a power of two, treat it as a shift if
4238 its first operand is a shift. */
4239 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4240 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4241 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4242 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4243 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4244 || GET_CODE (XEXP (x, 0)) == ROTATE
4245 || GET_CODE (XEXP (x, 0)) == ROTATERT))
8079805d 4246 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
230d793d
RS
4247 break;
4248
4249 case EQ: case NE:
4250 case GT: case GTU: case GE: case GEU:
4251 case LT: case LTU: case LE: case LEU:
69bc0a1f
JH
4252 case UNEQ: case LTGT:
4253 case UNGT: case UNGE:
4254 case UNLT: case UNLE:
4255 case UNORDERED: case ORDERED:
230d793d
RS
4256 /* If the first operand is a condition code, we can't do anything
4257 with it. */
4258 if (GET_CODE (XEXP (x, 0)) == COMPARE
4259 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4260#ifdef HAVE_cc0
4261 && XEXP (x, 0) != cc0_rtx
4262#endif
663522cb 4263 ))
230d793d
RS
4264 {
4265 rtx op0 = XEXP (x, 0);
4266 rtx op1 = XEXP (x, 1);
4267 enum rtx_code new_code;
4268
4269 if (GET_CODE (op0) == COMPARE)
4270 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4271
4272 /* Simplify our comparison, if possible. */
4273 new_code = simplify_comparison (code, &op0, &op1);
4274
230d793d 4275 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 4276 if only the low-order bit is possibly nonzero in X (such as when
5109d49f
RK
4277 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4278 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4279 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4280 (plus X 1).
4281
4282 Remove any ZERO_EXTRACT we made when thinking this was a
4283 comparison. It may now be simpler to use, e.g., an AND. If a
4284 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4285 the call to make_compound_operation in the SET case. */
4286
0802d516
RK
4287 if (STORE_FLAG_VALUE == 1
4288 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
a191f0ee
RH
4289 && op1 == const0_rtx
4290 && mode == GET_MODE (op0)
4291 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4292 return gen_lowpart_for_combine (mode,
4293 expand_compound_operation (op0));
5109d49f 4294
0802d516
RK
4295 else if (STORE_FLAG_VALUE == 1
4296 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4297 && op1 == const0_rtx
a191f0ee 4298 && mode == GET_MODE (op0)
5109d49f
RK
4299 && (num_sign_bit_copies (op0, mode)
4300 == GET_MODE_BITSIZE (mode)))
4301 {
4302 op0 = expand_compound_operation (op0);
f1c6ba8b
RK
4303 return simplify_gen_unary (NEG, mode,
4304 gen_lowpart_for_combine (mode, op0),
4305 mode);
5109d49f
RK
4306 }
4307
0802d516
RK
4308 else if (STORE_FLAG_VALUE == 1
4309 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4310 && op1 == const0_rtx
a191f0ee 4311 && mode == GET_MODE (op0)
5109d49f 4312 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4313 {
4314 op0 = expand_compound_operation (op0);
8079805d
RK
4315 return gen_binary (XOR, mode,
4316 gen_lowpart_for_combine (mode, op0),
4317 const1_rtx);
5109d49f 4318 }
818b11b9 4319
0802d516
RK
4320 else if (STORE_FLAG_VALUE == 1
4321 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4322 && op1 == const0_rtx
a191f0ee 4323 && mode == GET_MODE (op0)
5109d49f
RK
4324 && (num_sign_bit_copies (op0, mode)
4325 == GET_MODE_BITSIZE (mode)))
4326 {
4327 op0 = expand_compound_operation (op0);
8079805d 4328 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9 4329 }
230d793d 4330
5109d49f
RK
4331 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4332 those above. */
0802d516
RK
4333 if (STORE_FLAG_VALUE == -1
4334 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4335 && op1 == const0_rtx
5109d49f
RK
4336 && (num_sign_bit_copies (op0, mode)
4337 == GET_MODE_BITSIZE (mode)))
4338 return gen_lowpart_for_combine (mode,
4339 expand_compound_operation (op0));
4340
0802d516
RK
4341 else if (STORE_FLAG_VALUE == -1
4342 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4343 && op1 == const0_rtx
a191f0ee 4344 && mode == GET_MODE (op0)
5109d49f
RK
4345 && nonzero_bits (op0, mode) == 1)
4346 {
4347 op0 = expand_compound_operation (op0);
f1c6ba8b
RK
4348 return simplify_gen_unary (NEG, mode,
4349 gen_lowpart_for_combine (mode, op0),
4350 mode);
5109d49f
RK
4351 }
4352
0802d516
RK
4353 else if (STORE_FLAG_VALUE == -1
4354 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4355 && op1 == const0_rtx
a191f0ee 4356 && mode == GET_MODE (op0)
5109d49f
RK
4357 && (num_sign_bit_copies (op0, mode)
4358 == GET_MODE_BITSIZE (mode)))
230d793d 4359 {
818b11b9 4360 op0 = expand_compound_operation (op0);
f1c6ba8b
RK
4361 return simplify_gen_unary (NOT, mode,
4362 gen_lowpart_for_combine (mode, op0),
4363 mode);
5109d49f
RK
4364 }
4365
4366 /* If X is 0/1, (eq X 0) is X-1. */
0802d516
RK
4367 else if (STORE_FLAG_VALUE == -1
4368 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4369 && op1 == const0_rtx
a191f0ee 4370 && mode == GET_MODE (op0)
5109d49f
RK
4371 && nonzero_bits (op0, mode) == 1)
4372 {
4373 op0 = expand_compound_operation (op0);
8079805d 4374 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d 4375 }
230d793d
RS
4376
4377 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
951553af
RK
4378 one bit that might be nonzero, we can convert (ne x 0) to
4379 (ashift x c) where C puts the bit in the sign bit. Remove any
4380 AND with STORE_FLAG_VALUE when we are done, since we are only
4381 going to test the sign bit. */
3f508eca 4382 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 4383 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 4384 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
e51712db 4385 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
230d793d
RS
4386 && op1 == const0_rtx
4387 && mode == GET_MODE (op0)
5109d49f 4388 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 4389 {
818b11b9
RK
4390 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4391 expand_compound_operation (op0),
230d793d
RS
4392 GET_MODE_BITSIZE (mode) - 1 - i);
4393 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4394 return XEXP (x, 0);
4395 else
4396 return x;
4397 }
4398
4399 /* If the code changed, return a whole new comparison. */
4400 if (new_code != code)
f1c6ba8b 4401 return gen_rtx_fmt_ee (new_code, mode, op0, op1);
230d793d 4402
663522cb 4403 /* Otherwise, keep this operation, but maybe change its operands.
230d793d
RS
4404 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4405 SUBST (XEXP (x, 0), op0);
4406 SUBST (XEXP (x, 1), op1);
4407 }
4408 break;
663522cb 4409
230d793d 4410 case IF_THEN_ELSE:
8079805d 4411 return simplify_if_then_else (x);
9210df58 4412
8079805d
RK
4413 case ZERO_EXTRACT:
4414 case SIGN_EXTRACT:
4415 case ZERO_EXTEND:
4416 case SIGN_EXTEND:
0f41302f 4417 /* If we are processing SET_DEST, we are done. */
8079805d
RK
4418 if (in_dest)
4419 return x;
d0ab8cd3 4420
8079805d 4421 return expand_compound_operation (x);
d0ab8cd3 4422
8079805d
RK
4423 case SET:
4424 return simplify_set (x);
1a26b032 4425
8079805d
RK
4426 case AND:
4427 case IOR:
4428 case XOR:
4429 return simplify_logical (x, last);
d0ab8cd3 4430
663522cb 4431 case ABS:
8079805d
RK
4432 /* (abs (neg <foo>)) -> (abs <foo>) */
4433 if (GET_CODE (XEXP (x, 0)) == NEG)
4434 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1a26b032 4435
b472527b
JL
4436 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4437 do nothing. */
4438 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4439 break;
f40421ce 4440
8079805d
RK
4441 /* If operand is something known to be positive, ignore the ABS. */
4442 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4443 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4444 <= HOST_BITS_PER_WIDE_INT)
4445 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4446 & ((HOST_WIDE_INT) 1
4447 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4448 == 0)))
4449 return XEXP (x, 0);
1a26b032 4450
8079805d
RK
4451 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4452 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
f1c6ba8b 4453 return gen_rtx_NEG (mode, XEXP (x, 0));
1a26b032 4454
8079805d 4455 break;
1a26b032 4456
8079805d
RK
4457 case FFS:
4458 /* (ffs (*_extend <X>)) = (ffs <X>) */
4459 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4460 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4461 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4462 break;
1a26b032 4463
8079805d
RK
4464 case FLOAT:
4465 /* (float (sign_extend <X>)) = (float <X>). */
4466 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4467 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4468 break;
1a26b032 4469
8079805d
RK
4470 case ASHIFT:
4471 case LSHIFTRT:
4472 case ASHIFTRT:
4473 case ROTATE:
4474 case ROTATERT:
4475 /* If this is a shift by a constant amount, simplify it. */
4476 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
663522cb 4477 return simplify_shift_const (x, code, mode, XEXP (x, 0),
8079805d
RK
4478 INTVAL (XEXP (x, 1)));
4479
4480#ifdef SHIFT_COUNT_TRUNCATED
4481 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4482 SUBST (XEXP (x, 1),
4483 force_to_mode (XEXP (x, 1), GET_MODE (x),
663522cb 4484 ((HOST_WIDE_INT) 1
8079805d
RK
4485 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4486 - 1,
4487 NULL_RTX, 0));
4488#endif
4489
4490 break;
e9a25f70 4491
82be40f7
BS
4492 case VEC_SELECT:
4493 {
4494 rtx op0 = XEXP (x, 0);
4495 rtx op1 = XEXP (x, 1);
4496 int len;
4497
4498 if (GET_CODE (op1) != PARALLEL)
4499 abort ();
4500 len = XVECLEN (op1, 0);
4501 if (len == 1
4502 && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4503 && GET_CODE (op0) == VEC_CONCAT)
4504 {
4505 int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4506
4507 /* Try to find the element in the VEC_CONCAT. */
4508 for (;;)
4509 {
4510 if (GET_MODE (op0) == GET_MODE (x))
4511 return op0;
4512 if (GET_CODE (op0) == VEC_CONCAT)
4513 {
4514 HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4515 if (op0_size < offset)
4516 op0 = XEXP (op0, 0);
4517 else
4518 {
4519 offset -= op0_size;
4520 op0 = XEXP (op0, 1);
4521 }
4522 }
4523 else
4524 break;
4525 }
4526 }
4527 }
4528
4529 break;
4530
e9a25f70
JL
4531 default:
4532 break;
8079805d
RK
4533 }
4534
4535 return x;
4536}
4537\f
4538/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
5109d49f 4539
8079805d
RK
4540static rtx
4541simplify_if_then_else (x)
4542 rtx x;
4543{
4544 enum machine_mode mode = GET_MODE (x);
4545 rtx cond = XEXP (x, 0);
d6edb99e
ZW
4546 rtx true_rtx = XEXP (x, 1);
4547 rtx false_rtx = XEXP (x, 2);
8079805d
RK
4548 enum rtx_code true_code = GET_CODE (cond);
4549 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4550 rtx temp;
4551 int i;
9a915772
JH
4552 enum rtx_code false_code;
4553 rtx reversed;
8079805d 4554
0f41302f 4555 /* Simplify storing of the truth value. */
d6edb99e 4556 if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
8079805d 4557 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
663522cb 4558
0f41302f 4559 /* Also when the truth value has to be reversed. */
9a915772 4560 if (comparison_p
d6edb99e 4561 && true_rtx == const0_rtx && false_rtx == const_true_rtx
9a915772
JH
4562 && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
4563 XEXP (cond, 1))))
4564 return reversed;
8079805d
RK
4565
4566 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4567 in it is being compared against certain values. Get the true and false
4568 comparisons and see if that says anything about the value of each arm. */
4569
9a915772
JH
4570 if (comparison_p
4571 && ((false_code = combine_reversed_comparison_code (cond))
4572 != UNKNOWN)
8079805d
RK
4573 && GET_CODE (XEXP (cond, 0)) == REG)
4574 {
4575 HOST_WIDE_INT nzb;
4576 rtx from = XEXP (cond, 0);
8079805d
RK
4577 rtx true_val = XEXP (cond, 1);
4578 rtx false_val = true_val;
4579 int swapped = 0;
9210df58 4580
8079805d 4581 /* If FALSE_CODE is EQ, swap the codes and arms. */
5109d49f 4582
8079805d 4583 if (false_code == EQ)
1a26b032 4584 {
8079805d 4585 swapped = 1, true_code = EQ, false_code = NE;
d6edb99e 4586 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
8079805d 4587 }
5109d49f 4588
8079805d
RK
4589 /* If we are comparing against zero and the expression being tested has
4590 only a single bit that might be nonzero, that is its value when it is
4591 not equal to zero. Similarly if it is known to be -1 or 0. */
4592
4593 if (true_code == EQ && true_val == const0_rtx
4594 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4595 false_code = EQ, false_val = GEN_INT (nzb);
4596 else if (true_code == EQ && true_val == const0_rtx
4597 && (num_sign_bit_copies (from, GET_MODE (from))
4598 == GET_MODE_BITSIZE (GET_MODE (from))))
4599 false_code = EQ, false_val = constm1_rtx;
4600
4601 /* Now simplify an arm if we know the value of the register in the
4602 branch and it is used in the arm. Be careful due to the potential
4603 of locally-shared RTL. */
4604
d6edb99e
ZW
4605 if (reg_mentioned_p (from, true_rtx))
4606 true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4607 from, true_val),
8079805d 4608 pc_rtx, pc_rtx, 0, 0);
d6edb99e
ZW
4609 if (reg_mentioned_p (from, false_rtx))
4610 false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
8079805d
RK
4611 from, false_val),
4612 pc_rtx, pc_rtx, 0, 0);
4613
d6edb99e
ZW
4614 SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4615 SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
8079805d 4616
d6edb99e
ZW
4617 true_rtx = XEXP (x, 1);
4618 false_rtx = XEXP (x, 2);
4619 true_code = GET_CODE (cond);
8079805d 4620 }
5109d49f 4621
8079805d
RK
4622 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4623 reversed, do so to avoid needing two sets of patterns for
4624 subtract-and-branch insns. Similarly if we have a constant in the true
4625 arm, the false arm is the same as the first operand of the comparison, or
4626 the false arm is more complicated than the true arm. */
4627
9a915772
JH
4628 if (comparison_p
4629 && combine_reversed_comparison_code (cond) != UNKNOWN
d6edb99e
ZW
4630 && (true_rtx == pc_rtx
4631 || (CONSTANT_P (true_rtx)
4632 && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4633 || true_rtx == const0_rtx
4634 || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o'
4635 && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4636 || (GET_CODE (true_rtx) == SUBREG
4637 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o'
4638 && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4639 || reg_mentioned_p (true_rtx, false_rtx)
4640 || rtx_equal_p (false_rtx, XEXP (cond, 0))))
8079805d 4641 {
9a915772 4642 true_code = reversed_comparison_code (cond, NULL);
8079805d 4643 SUBST (XEXP (x, 0),
9a915772
JH
4644 reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
4645 XEXP (cond, 1)));
5109d49f 4646
d6edb99e
ZW
4647 SUBST (XEXP (x, 1), false_rtx);
4648 SUBST (XEXP (x, 2), true_rtx);
1a26b032 4649
d6edb99e
ZW
4650 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4651 cond = XEXP (x, 0);
bb821298 4652
0f41302f 4653 /* It is possible that the conditional has been simplified out. */
bb821298
RK
4654 true_code = GET_CODE (cond);
4655 comparison_p = GET_RTX_CLASS (true_code) == '<';
8079805d 4656 }
abe6e52f 4657
8079805d 4658 /* If the two arms are identical, we don't need the comparison. */
1a26b032 4659
d6edb99e
ZW
4660 if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4661 return true_rtx;
1a26b032 4662
5be669c7
RK
4663 /* Convert a == b ? b : a to "a". */
4664 if (true_code == EQ && ! side_effects_p (cond)
de6c5979 4665 && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
d6edb99e
ZW
4666 && rtx_equal_p (XEXP (cond, 0), false_rtx)
4667 && rtx_equal_p (XEXP (cond, 1), true_rtx))
4668 return false_rtx;
5be669c7 4669 else if (true_code == NE && ! side_effects_p (cond)
de6c5979 4670 && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
d6edb99e
ZW
4671 && rtx_equal_p (XEXP (cond, 0), true_rtx)
4672 && rtx_equal_p (XEXP (cond, 1), false_rtx))
4673 return true_rtx;
5be669c7 4674
8079805d
RK
4675 /* Look for cases where we have (abs x) or (neg (abs X)). */
4676
4677 if (GET_MODE_CLASS (mode) == MODE_INT
d6edb99e
ZW
4678 && GET_CODE (false_rtx) == NEG
4679 && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
8079805d 4680 && comparison_p
d6edb99e
ZW
4681 && rtx_equal_p (true_rtx, XEXP (cond, 0))
4682 && ! side_effects_p (true_rtx))
8079805d
RK
4683 switch (true_code)
4684 {
4685 case GT:
4686 case GE:
f1c6ba8b 4687 return simplify_gen_unary (ABS, mode, true_rtx, mode);
8079805d
RK
4688 case LT:
4689 case LE:
f1c6ba8b
RK
4690 return
4691 simplify_gen_unary (NEG, mode,
4692 simplify_gen_unary (ABS, mode, true_rtx, mode),
4693 mode);
e9a25f70
JL
4694 default:
4695 break;
8079805d
RK
4696 }
4697
4698 /* Look for MIN or MAX. */
4699
de6c5979 4700 if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
8079805d 4701 && comparison_p
d6edb99e
ZW
4702 && rtx_equal_p (XEXP (cond, 0), true_rtx)
4703 && rtx_equal_p (XEXP (cond, 1), false_rtx)
8079805d
RK
4704 && ! side_effects_p (cond))
4705 switch (true_code)
4706 {
4707 case GE:
4708 case GT:
d6edb99e 4709 return gen_binary (SMAX, mode, true_rtx, false_rtx);
8079805d
RK
4710 case LE:
4711 case LT:
d6edb99e 4712 return gen_binary (SMIN, mode, true_rtx, false_rtx);
8079805d
RK
4713 case GEU:
4714 case GTU:
d6edb99e 4715 return gen_binary (UMAX, mode, true_rtx, false_rtx);
8079805d
RK
4716 case LEU:
4717 case LTU:
d6edb99e 4718 return gen_binary (UMIN, mode, true_rtx, false_rtx);
e9a25f70
JL
4719 default:
4720 break;
8079805d 4721 }
663522cb 4722
8079805d
RK
4723 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4724 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4725 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4726 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4727 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
0802d516 4728 neither 1 or -1, but it isn't worth checking for. */
8079805d 4729
0802d516
RK
4730 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4731 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
8079805d 4732 {
d6edb99e
ZW
4733 rtx t = make_compound_operation (true_rtx, SET);
4734 rtx f = make_compound_operation (false_rtx, SET);
8079805d
RK
4735 rtx cond_op0 = XEXP (cond, 0);
4736 rtx cond_op1 = XEXP (cond, 1);
6a651371 4737 enum rtx_code op = NIL, extend_op = NIL;
8079805d 4738 enum machine_mode m = mode;
6a651371 4739 rtx z = 0, c1 = NULL_RTX;
8079805d 4740
8079805d
RK
4741 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4742 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4743 || GET_CODE (t) == ASHIFT
4744 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4745 && rtx_equal_p (XEXP (t, 0), f))
4746 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4747
4748 /* If an identity-zero op is commutative, check whether there
0f41302f 4749 would be a match if we swapped the operands. */
8079805d
RK
4750 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4751 || GET_CODE (t) == XOR)
4752 && rtx_equal_p (XEXP (t, 1), f))
4753 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4754 else if (GET_CODE (t) == SIGN_EXTEND
4755 && (GET_CODE (XEXP (t, 0)) == PLUS
4756 || GET_CODE (XEXP (t, 0)) == MINUS
4757 || GET_CODE (XEXP (t, 0)) == IOR
4758 || GET_CODE (XEXP (t, 0)) == XOR
4759 || GET_CODE (XEXP (t, 0)) == ASHIFT
4760 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4761 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4762 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4763 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4764 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4765 && (num_sign_bit_copies (f, GET_MODE (f))
4766 > (GET_MODE_BITSIZE (mode)
4767 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4768 {
4769 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4770 extend_op = SIGN_EXTEND;
4771 m = GET_MODE (XEXP (t, 0));
1a26b032 4772 }
8079805d
RK
4773 else if (GET_CODE (t) == SIGN_EXTEND
4774 && (GET_CODE (XEXP (t, 0)) == PLUS
4775 || GET_CODE (XEXP (t, 0)) == IOR
4776 || GET_CODE (XEXP (t, 0)) == XOR)
4777 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4778 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4779 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4780 && (num_sign_bit_copies (f, GET_MODE (f))
4781 > (GET_MODE_BITSIZE (mode)
4782 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4783 {
4784 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4785 extend_op = SIGN_EXTEND;
4786 m = GET_MODE (XEXP (t, 0));
4787 }
4788 else if (GET_CODE (t) == ZERO_EXTEND
4789 && (GET_CODE (XEXP (t, 0)) == PLUS
4790 || GET_CODE (XEXP (t, 0)) == MINUS
4791 || GET_CODE (XEXP (t, 0)) == IOR
4792 || GET_CODE (XEXP (t, 0)) == XOR
4793 || GET_CODE (XEXP (t, 0)) == ASHIFT
4794 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4795 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4796 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4797 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4798 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4799 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4800 && ((nonzero_bits (f, GET_MODE (f))
663522cb 4801 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
8079805d
RK
4802 == 0))
4803 {
4804 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4805 extend_op = ZERO_EXTEND;
4806 m = GET_MODE (XEXP (t, 0));
4807 }
4808 else if (GET_CODE (t) == ZERO_EXTEND
4809 && (GET_CODE (XEXP (t, 0)) == PLUS
4810 || GET_CODE (XEXP (t, 0)) == IOR
4811 || GET_CODE (XEXP (t, 0)) == XOR)
4812 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4813 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4814 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4815 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4816 && ((nonzero_bits (f, GET_MODE (f))
663522cb 4817 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
8079805d
RK
4818 == 0))
4819 {
4820 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4821 extend_op = ZERO_EXTEND;
4822 m = GET_MODE (XEXP (t, 0));
4823 }
663522cb 4824
8079805d
RK
4825 if (z)
4826 {
4827 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4828 pc_rtx, pc_rtx, 0, 0);
4829 temp = gen_binary (MULT, m, temp,
4830 gen_binary (MULT, m, c1, const_true_rtx));
4831 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4832 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4833
4834 if (extend_op != NIL)
f1c6ba8b 4835 temp = simplify_gen_unary (extend_op, mode, temp, m);
8079805d
RK
4836
4837 return temp;
4838 }
4839 }
224eeff2 4840
8079805d
RK
4841 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4842 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4843 negation of a single bit, we can convert this operation to a shift. We
4844 can actually do this more generally, but it doesn't seem worth it. */
4845
4846 if (true_code == NE && XEXP (cond, 1) == const0_rtx
d6edb99e 4847 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
8079805d 4848 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
d6edb99e 4849 && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
8079805d
RK
4850 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4851 == GET_MODE_BITSIZE (mode))
d6edb99e 4852 && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
8079805d
RK
4853 return
4854 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4855 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
230d793d 4856
8079805d
RK
4857 return x;
4858}
4859\f
4860/* Simplify X, a SET expression. Return the new expression. */
230d793d 4861
8079805d
RK
4862static rtx
4863simplify_set (x)
4864 rtx x;
4865{
4866 rtx src = SET_SRC (x);
4867 rtx dest = SET_DEST (x);
4868 enum machine_mode mode
4869 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4870 rtx other_insn;
4871 rtx *cc_use;
4872
4873 /* (set (pc) (return)) gets written as (return). */
4874 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4875 return src;
230d793d 4876
87e3e0c1
RK
4877 /* Now that we know for sure which bits of SRC we are using, see if we can
4878 simplify the expression for the object knowing that we only need the
4879 low-order bits. */
4880
4881 if (GET_MODE_CLASS (mode) == MODE_INT)
c5c76735 4882 {
e8dc6d50 4883 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
c5c76735
JL
4884 SUBST (SET_SRC (x), src);
4885 }
87e3e0c1 4886
8079805d
RK
4887 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4888 the comparison result and try to simplify it unless we already have used
4889 undobuf.other_insn. */
4890 if ((GET_CODE (src) == COMPARE
230d793d 4891#ifdef HAVE_cc0
8079805d 4892 || dest == cc0_rtx
230d793d 4893#endif
8079805d
RK
4894 )
4895 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4896 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4897 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
c0d3ac4d 4898 && rtx_equal_p (XEXP (*cc_use, 0), dest))
8079805d
RK
4899 {
4900 enum rtx_code old_code = GET_CODE (*cc_use);
4901 enum rtx_code new_code;
4902 rtx op0, op1;
4903 int other_changed = 0;
4904 enum machine_mode compare_mode = GET_MODE (dest);
4905
4906 if (GET_CODE (src) == COMPARE)
4907 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4908 else
4909 op0 = src, op1 = const0_rtx;
230d793d 4910
8079805d
RK
4911 /* Simplify our comparison, if possible. */
4912 new_code = simplify_comparison (old_code, &op0, &op1);
230d793d 4913
c141a106 4914#ifdef EXTRA_CC_MODES
8079805d
RK
4915 /* If this machine has CC modes other than CCmode, check to see if we
4916 need to use a different CC mode here. */
4917 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
c141a106 4918#endif /* EXTRA_CC_MODES */
230d793d 4919
c141a106 4920#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
8079805d
RK
4921 /* If the mode changed, we have to change SET_DEST, the mode in the
4922 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4923 a hard register, just build new versions with the proper mode. If it
4924 is a pseudo, we lose unless it is only time we set the pseudo, in
4925 which case we can safely change its mode. */
4926 if (compare_mode != GET_MODE (dest))
4927 {
770ae6cc 4928 unsigned int regno = REGNO (dest);
38a448ca 4929 rtx new_dest = gen_rtx_REG (compare_mode, regno);
8079805d
RK
4930
4931 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 4932 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
230d793d 4933 {
8079805d
RK
4934 if (regno >= FIRST_PSEUDO_REGISTER)
4935 SUBST (regno_reg_rtx[regno], new_dest);
230d793d 4936
8079805d
RK
4937 SUBST (SET_DEST (x), new_dest);
4938 SUBST (XEXP (*cc_use, 0), new_dest);
4939 other_changed = 1;
230d793d 4940
8079805d 4941 dest = new_dest;
230d793d 4942 }
8079805d 4943 }
230d793d
RS
4944#endif
4945
8079805d
RK
4946 /* If the code changed, we have to build a new comparison in
4947 undobuf.other_insn. */
4948 if (new_code != old_code)
4949 {
4950 unsigned HOST_WIDE_INT mask;
4951
f1c6ba8b
RK
4952 SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
4953 dest, const0_rtx));
8079805d
RK
4954
4955 /* If the only change we made was to change an EQ into an NE or
4956 vice versa, OP0 has only one bit that might be nonzero, and OP1
4957 is zero, check if changing the user of the condition code will
4958 produce a valid insn. If it won't, we can keep the original code
4959 in that insn by surrounding our operation with an XOR. */
4960
4961 if (((old_code == NE && new_code == EQ)
4962 || (old_code == EQ && new_code == NE))
4963 && ! other_changed && op1 == const0_rtx
4964 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4965 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
230d793d 4966 {
8079805d 4967 rtx pat = PATTERN (other_insn), note = 0;
230d793d 4968
8e2f6e35 4969 if ((recog_for_combine (&pat, other_insn, &note) < 0
8079805d
RK
4970 && ! check_asm_operands (pat)))
4971 {
4972 PUT_CODE (*cc_use, old_code);
4973 other_insn = 0;
230d793d 4974
8079805d 4975 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
230d793d 4976 }
230d793d
RS
4977 }
4978
8079805d
RK
4979 other_changed = 1;
4980 }
4981
4982 if (other_changed)
4983 undobuf.other_insn = other_insn;
230d793d
RS
4984
4985#ifdef HAVE_cc0
8079805d
RK
4986 /* If we are now comparing against zero, change our source if
4987 needed. If we do not use cc0, we always have a COMPARE. */
4988 if (op1 == const0_rtx && dest == cc0_rtx)
4989 {
4990 SUBST (SET_SRC (x), op0);
4991 src = op0;
4992 }
4993 else
230d793d
RS
4994#endif
4995
8079805d
RK
4996 /* Otherwise, if we didn't previously have a COMPARE in the
4997 correct mode, we need one. */
4998 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4999 {
f1c6ba8b 5000 SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
8079805d 5001 src = SET_SRC (x);
230d793d
RS
5002 }
5003 else
5004 {
8079805d
RK
5005 /* Otherwise, update the COMPARE if needed. */
5006 SUBST (XEXP (src, 0), op0);
5007 SUBST (XEXP (src, 1), op1);
230d793d 5008 }
8079805d
RK
5009 }
5010 else
5011 {
5012 /* Get SET_SRC in a form where we have placed back any
5013 compound expressions. Then do the checks below. */
5014 src = make_compound_operation (src, SET);
5015 SUBST (SET_SRC (x), src);
5016 }
230d793d 5017
8079805d
RK
5018 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5019 and X being a REG or (subreg (reg)), we may be able to convert this to
663522cb 5020 (set (subreg:m2 x) (op)).
df62f951 5021
8079805d
RK
5022 We can always do this if M1 is narrower than M2 because that means that
5023 we only care about the low bits of the result.
df62f951 5024
8079805d 5025 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
9ec36da5 5026 perform a narrower operation than requested since the high-order bits will
8079805d
RK
5027 be undefined. On machine where it is defined, this transformation is safe
5028 as long as M1 and M2 have the same number of words. */
663522cb 5029
8079805d
RK
5030 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5031 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5032 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5033 / UNITS_PER_WORD)
5034 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5035 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
8baf60bb 5036#ifndef WORD_REGISTER_OPERATIONS
8079805d
RK
5037 && (GET_MODE_SIZE (GET_MODE (src))
5038 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
df62f951 5039#endif
02188693 5040#ifdef CLASS_CANNOT_CHANGE_MODE
f507a070
RK
5041 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5042 && (TEST_HARD_REG_BIT
02188693 5043 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
f507a070 5044 REGNO (dest)))
02188693
RH
5045 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (src),
5046 GET_MODE (SUBREG_REG (src))))
663522cb 5047#endif
8079805d
RK
5048 && (GET_CODE (dest) == REG
5049 || (GET_CODE (dest) == SUBREG
5050 && GET_CODE (SUBREG_REG (dest)) == REG)))
5051 {
5052 SUBST (SET_DEST (x),
5053 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5054 dest));
5055 SUBST (SET_SRC (x), SUBREG_REG (src));
5056
5057 src = SET_SRC (x), dest = SET_DEST (x);
5058 }
df62f951 5059
8baf60bb 5060#ifdef LOAD_EXTEND_OP
8079805d
RK
5061 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5062 would require a paradoxical subreg. Replace the subreg with a
0f41302f 5063 zero_extend to avoid the reload that would otherwise be required. */
8079805d
RK
5064
5065 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5066 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
ddef6bc7 5067 && SUBREG_BYTE (src) == 0
8079805d
RK
5068 && (GET_MODE_SIZE (GET_MODE (src))
5069 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5070 && GET_CODE (SUBREG_REG (src)) == MEM)
5071 {
5072 SUBST (SET_SRC (x),
f1c6ba8b 5073 gen_rtx (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
ddef6bc7 5074 GET_MODE (src), SUBREG_REG (src)));
8079805d
RK
5075
5076 src = SET_SRC (x);
5077 }
230d793d
RS
5078#endif
5079
8079805d
RK
5080 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5081 are comparing an item known to be 0 or -1 against 0, use a logical
5082 operation instead. Check for one of the arms being an IOR of the other
5083 arm with some value. We compute three terms to be IOR'ed together. In
5084 practice, at most two will be nonzero. Then we do the IOR's. */
5085
5086 if (GET_CODE (dest) != PC
5087 && GET_CODE (src) == IF_THEN_ELSE
36b8d792 5088 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
8079805d
RK
5089 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5090 && XEXP (XEXP (src, 0), 1) == const0_rtx
6dd49058 5091 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
ea414472
DE
5092#ifdef HAVE_conditional_move
5093 && ! can_conditionally_move_p (GET_MODE (src))
5094#endif
8079805d
RK
5095 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5096 GET_MODE (XEXP (XEXP (src, 0), 0)))
5097 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5098 && ! side_effects_p (src))
5099 {
d6edb99e 5100 rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
8079805d 5101 ? XEXP (src, 1) : XEXP (src, 2));
d6edb99e 5102 rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
8079805d
RK
5103 ? XEXP (src, 2) : XEXP (src, 1));
5104 rtx term1 = const0_rtx, term2, term3;
5105
d6edb99e
ZW
5106 if (GET_CODE (true_rtx) == IOR
5107 && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5108 term1 = false_rtx, true_rtx = XEXP(true_rtx, 1), false_rtx = const0_rtx;
5109 else if (GET_CODE (true_rtx) == IOR
5110 && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5111 term1 = false_rtx, true_rtx = XEXP(true_rtx, 0), false_rtx = const0_rtx;
5112 else if (GET_CODE (false_rtx) == IOR
5113 && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5114 term1 = true_rtx, false_rtx = XEXP(false_rtx, 1), true_rtx = const0_rtx;
5115 else if (GET_CODE (false_rtx) == IOR
5116 && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5117 term1 = true_rtx, false_rtx = XEXP(false_rtx, 0), true_rtx = const0_rtx;
5118
5119 term2 = gen_binary (AND, GET_MODE (src),
5120 XEXP (XEXP (src, 0), 0), true_rtx);
8079805d 5121 term3 = gen_binary (AND, GET_MODE (src),
f1c6ba8b
RK
5122 simplify_gen_unary (NOT, GET_MODE (src),
5123 XEXP (XEXP (src, 0), 0),
5124 GET_MODE (src)),
d6edb99e 5125 false_rtx);
8079805d
RK
5126
5127 SUBST (SET_SRC (x),
5128 gen_binary (IOR, GET_MODE (src),
5129 gen_binary (IOR, GET_MODE (src), term1, term2),
5130 term3));
5131
5132 src = SET_SRC (x);
5133 }
230d793d 5134
246e00f2
RK
5135 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5136 whole thing fail. */
5137 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5138 return src;
5139 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5140 return dest;
5141 else
5142 /* Convert this into a field assignment operation, if possible. */
5143 return make_field_assignment (x);
8079805d
RK
5144}
5145\f
/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
   result.  LAST is nonzero if this is the last retry, in which case some
   extra canonicalizations (shrinking the AND constant) are attempted.

   NOTE(review): X may be rewritten in place via SUBST, or a freshly
   generated rtx may be returned; callers must use the return value.  */

static rtx
simplify_logical (x, last)
     rtx x;
     int last;
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);
  rtx reversed;

  switch (GET_CODE (x))
    {
    case AND:
      /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
	 insn (and may simplify more).  */
      if (GET_CODE (op0) == XOR
	  && rtx_equal_p (XEXP (op0, 0), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode,
			simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
			op1);

      /* Same transformation with the XOR operands swapped.  */
      if (GET_CODE (op0) == XOR
	  && rtx_equal_p (XEXP (op0, 1), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode,
			simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
			op1);

      /* Similarly for (~(A ^ B)) & A, which is A & B.  */
      if (GET_CODE (op0) == NOT
	  && GET_CODE (XEXP (op0, 0)) == XOR
	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);

      if (GET_CODE (op0) == NOT
	  && GET_CODE (XEXP (op0, 0)) == XOR
	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);

      /* We can call simplify_and_const_int only if we don't lose
	 any (sign) bits when converting INTVAL (op1) to
	 "unsigned HOST_WIDE_INT".  */
      if (GET_CODE (op1) == CONST_INT
	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      || INTVAL (op1) > 0))
	{
	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));

	  /* If we have (ior (and (X C1) C2)) and the next restart would be
	     the last, simplify this by making C1 as small as possible
	     and then exit.  */
	  if (last
	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (op1) == CONST_INT)
	    return gen_binary (IOR, mode,
			       gen_binary (AND, mode, XEXP (op0, 0),
					   GEN_INT (INTVAL (XEXP (op0, 1))
						    & ~INTVAL (op1))), op1);

	  if (GET_CODE (x) != AND)
	    return x;

	  /* Refresh the cached operands; simplify_and_const_int may have
	     rebuilt X.  */
	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
	}

      /* Convert (A | B) & A to A.  */
      if (GET_CODE (op0) == IOR
	  && (rtx_equal_p (XEXP (op0, 0), op1)
	      || rtx_equal_p (XEXP (op0, 1), op1))
	  && ! side_effects_p (XEXP (op0, 0))
	  && ! side_effects_p (XEXP (op0, 1)))
	return op1;

      /* In the following group of tests (and those in case IOR below),
	 we start with some combination of logical operations and apply
	 the distributive law followed by the inverse distributive law.
	 Most of the time, this results in no change.  However, if some of
	 the operands are the same or inverses of each other, simplifications
	 will result.

	 For example, (and (ior A B) (not B)) can occur as the result of
	 expanding a bit field assignment.  When we apply the distributive
	 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
	 which then simplifies to (and (A (not B))).

	 If we have (and (ior A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
	{
	  x = apply_distributive_law
	    (gen_binary (GET_CODE (op0), mode,
			 gen_binary (AND, mode, XEXP (op0, 0), op1),
			 gen_binary (AND, mode, XEXP (op0, 1),
				     copy_rtx (op1))));
	  if (GET_CODE (x) != AND)
	    return x;
	}

      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
	return apply_distributive_law
	  (gen_binary (GET_CODE (op1), mode,
		       gen_binary (AND, mode, XEXP (op1, 0), op0),
		       gen_binary (AND, mode, XEXP (op1, 1),
				   copy_rtx (op0))));

      /* Similarly, taking advantage of the fact that
	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */

      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
	return apply_distributive_law
	  (gen_binary (XOR, mode,
		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
		       gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
				   XEXP (op1, 1))));

      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
	return apply_distributive_law
	  (gen_binary (XOR, mode,
		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
		       gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)),
				   XEXP (op0, 1))));
      break;

    case IOR:
      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
      if (GET_CODE (op1) == CONST_INT
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
	return op1;

      /* Convert (A & B) | A to A.  */
      if (GET_CODE (op0) == AND
	  && (rtx_equal_p (XEXP (op0, 0), op1)
	      || rtx_equal_p (XEXP (op0, 1), op1))
	  && ! side_effects_p (XEXP (op0, 0))
	  && ! side_effects_p (XEXP (op0, 1)))
	return op1;

      /* If we have (ior (and A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
	{
	  x = apply_distributive_law
	    (gen_binary (AND, mode,
			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
			 gen_binary (IOR, mode, XEXP (op0, 1),
				     copy_rtx (op1))));

	  if (GET_CODE (x) != IOR)
	    return x;
	}

      if (GET_CODE (op1) == AND)
	{
	  x = apply_distributive_law
	    (gen_binary (AND, mode,
			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
			 gen_binary (IOR, mode, XEXP (op1, 1),
				     copy_rtx (op0))));

	  if (GET_CODE (x) != IOR)
	    return x;
	}

      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
	 mode size to (rotate A CX).  */

      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
	      == GET_MODE_BITSIZE (mode)))
	return gen_rtx_ROTATE (mode, XEXP (op0, 0),
			       (GET_CODE (op0) == ASHIFT
				? XEXP (op0, 1) : XEXP (op1, 1)));

      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
	 a (sign_extend (plus ...)).  If so, OP1 is a CONST_INT, and the PLUS
	 does not affect any of the bits in OP1, it can really be done
	 as a PLUS and we can associate.  We do this by seeing if OP1
	 can be safely shifted left C bits.  */
      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
	  && GET_CODE (XEXP (op0, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  int count = INTVAL (XEXP (op0, 1));
	  HOST_WIDE_INT mask = INTVAL (op1) << count;

	  /* The shift-back test verifies that no bits of OP1 were lost
	     when shifting left by COUNT.  */
	  if (mask >> count == INTVAL (op1)
	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
	    {
	      SUBST (XEXP (XEXP (op0, 0), 1),
		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
	      return op0;
	    }
	}
      break;

    case XOR:
      /* If we are XORing two things that have no bits in common,
	 convert them into an IOR.  This helps to detect rotation encoded
	 using those methods and possibly other simplifications.  */

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (op0, mode)
	      & nonzero_bits (op1, mode)) == 0)
	return (gen_binary (IOR, mode, op0, op1));

      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
	 (NOT y).  */
      {
	int num_negated = 0;

	if (GET_CODE (op0) == NOT)
	  num_negated++, op0 = XEXP (op0, 0);
	if (GET_CODE (op1) == NOT)
	  num_negated++, op1 = XEXP (op1, 0);

	if (num_negated == 2)
	  {
	    SUBST (XEXP (x, 0), op0);
	    SUBST (XEXP (x, 1), op1);
	  }
	else if (num_negated == 1)
	  return
	    simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1),
				mode);
      }

      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
	 correspond to a machine insn or result in further simplifications
	 if B is a constant.  */

      if (GET_CODE (op0) == AND
	  && rtx_equal_p (XEXP (op0, 1), op1)
	  && ! side_effects_p (op1))
	return gen_binary (AND, mode,
			   simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
			   op1);

      else if (GET_CODE (op0) == AND
	       && rtx_equal_p (XEXP (op0, 0), op1)
	       && ! side_effects_p (op1))
	return gen_binary (AND, mode,
			   simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
			   op1);

      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
	 comparison if STORE_FLAG_VALUE is 1.  */
      if (STORE_FLAG_VALUE == 1
	  && op1 == const1_rtx
	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
					      XEXP (op0, 1))))
	return reversed;

      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
	 is (lt foo (const_int 0)), so we can perform the above
	 simplification if STORE_FLAG_VALUE is 1.  */

      if (STORE_FLAG_VALUE == 1
	  && op1 == const1_rtx
	  && GET_CODE (op0) == LSHIFTRT
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
	return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);

      /* (xor (comparison foo bar) (const_int sign-bit))
	 when STORE_FLAG_VALUE is the sign bit.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
	      == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
	  && op1 == const_true_rtx
	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
					      XEXP (op0, 1))))
	return reversed;

      break;

    default:
      abort ();
    }

  return x;
}
5447\f
5448/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5449 operations" because they can be replaced with two more basic operations.
5450 ZERO_EXTEND is also considered "compound" because it can be replaced with
5451 an AND operation, which is simpler, though only one operation.
5452
5453 The function expand_compound_operation is called with an rtx expression
663522cb 5454 and will convert it to the appropriate shifts and AND operations,
230d793d
RS
5455 simplifying at each stage.
5456
5457 The function make_compound_operation is called to convert an expression
5458 consisting of shifts and ANDs into the equivalent compound expression.
5459 It is the inverse of this function, loosely speaking. */
5460
5461static rtx
5462expand_compound_operation (x)
5463 rtx x;
5464{
770ae6cc 5465 unsigned HOST_WIDE_INT pos = 0, len;
230d793d 5466 int unsignedp = 0;
770ae6cc 5467 unsigned int modewidth;
230d793d
RS
5468 rtx tem;
5469
5470 switch (GET_CODE (x))
5471 {
5472 case ZERO_EXTEND:
5473 unsignedp = 1;
5474 case SIGN_EXTEND:
75473182
RS
5475 /* We can't necessarily use a const_int for a multiword mode;
5476 it depends on implicitly extending the value.
5477 Since we don't know the right way to extend it,
5478 we can't tell whether the implicit way is right.
5479
5480 Even for a mode that is no wider than a const_int,
5481 we can't win, because we need to sign extend one of its bits through
5482 the rest of it, and we don't know which bit. */
230d793d 5483 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 5484 return x;
230d793d 5485
8079805d
RK
5486 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5487 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5488 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5489 reloaded. If not for that, MEM's would very rarely be safe.
5490
5491 Reject MODEs bigger than a word, because we might not be able
5492 to reference a two-register group starting with an arbitrary register
5493 (and currently gen_lowpart might crash for a SUBREG). */
663522cb 5494
8079805d 5495 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
5496 return x;
5497
5498 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5499 /* If the inner object has VOIDmode (the only way this can happen
5500 is if it is a ASM_OPERANDS), we can't do anything since we don't
5501 know how much masking to do. */
5502 if (len == 0)
5503 return x;
5504
5505 break;
5506
5507 case ZERO_EXTRACT:
5508 unsignedp = 1;
5509 case SIGN_EXTRACT:
5510 /* If the operand is a CLOBBER, just return it. */
5511 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5512 return XEXP (x, 0);
5513
5514 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5515 || GET_CODE (XEXP (x, 2)) != CONST_INT
5516 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5517 return x;
5518
5519 len = INTVAL (XEXP (x, 1));
5520 pos = INTVAL (XEXP (x, 2));
5521
5522 /* If this goes outside the object being extracted, replace the object
5523 with a (use (mem ...)) construct that only combine understands
5524 and is used only for this purpose. */
5525 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
38a448ca 5526 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
230d793d 5527
f76b9db2
ILT
5528 if (BITS_BIG_ENDIAN)
5529 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5530
230d793d
RS
5531 break;
5532
5533 default:
5534 return x;
5535 }
0f808b6f
JH
5536 /* Convert sign extension to zero extension, if we know that the high
5537 bit is not set, as this is easier to optimize. It will be converted
5538 back to cheaper alternative in make_extraction. */
5539 if (GET_CODE (x) == SIGN_EXTEND
5540 && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5541 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
663522cb 5542 & ~(((unsigned HOST_WIDE_INT)
0f808b6f
JH
5543 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5544 >> 1))
5545 == 0)))
5546 {
5547 rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5548 return expand_compound_operation (temp);
5549 }
230d793d 5550
0f13a422
ILT
5551 /* We can optimize some special cases of ZERO_EXTEND. */
5552 if (GET_CODE (x) == ZERO_EXTEND)
5553 {
5554 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5555 know that the last value didn't have any inappropriate bits
5556 set. */
5557 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5558 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5559 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5560 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
663522cb 5561 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5562 return XEXP (XEXP (x, 0), 0);
5563
5564 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5565 if (GET_CODE (XEXP (x, 0)) == SUBREG
5566 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5567 && subreg_lowpart_p (XEXP (x, 0))
5568 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5569 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
663522cb 5570 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5571 return SUBREG_REG (XEXP (x, 0));
5572
5573 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5574 is a comparison and STORE_FLAG_VALUE permits. This is like
5575 the first case, but it works even when GET_MODE (x) is larger
5576 than HOST_WIDE_INT. */
5577 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5578 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5579 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5580 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5581 <= HOST_BITS_PER_WIDE_INT)
5582 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
663522cb 5583 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5584 return XEXP (XEXP (x, 0), 0);
5585
5586 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5587 if (GET_CODE (XEXP (x, 0)) == SUBREG
5588 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5589 && subreg_lowpart_p (XEXP (x, 0))
5590 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5591 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5592 <= HOST_BITS_PER_WIDE_INT)
5593 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
663522cb 5594 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5595 return SUBREG_REG (XEXP (x, 0));
5596
0f13a422
ILT
5597 }
5598
230d793d
RS
5599 /* If we reach here, we want to return a pair of shifts. The inner
5600 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5601 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5602 logical depending on the value of UNSIGNEDP.
5603
5604 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5605 converted into an AND of a shift.
5606
5607 We must check for the case where the left shift would have a negative
5608 count. This can happen in a case like (x >> 31) & 255 on machines
5609 that can't shift by a constant. On those machines, we would first
663522cb 5610 combine the shift with the AND to produce a variable-position
230d793d
RS
5611 extraction. Then the constant of 31 would be substituted in to produce
5612 a such a position. */
5613
5614 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
770ae6cc 5615 if (modewidth + len >= pos)
5f4f0e22 5616 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 5617 GET_MODE (x),
5f4f0e22
CH
5618 simplify_shift_const (NULL_RTX, ASHIFT,
5619 GET_MODE (x),
230d793d
RS
5620 XEXP (x, 0),
5621 modewidth - pos - len),
5622 modewidth - len);
5623
5f4f0e22
CH
5624 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5625 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5626 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
5627 GET_MODE (x),
5628 XEXP (x, 0), pos),
5f4f0e22 5629 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5630 else
5631 /* Any other cases we can't handle. */
5632 return x;
230d793d
RS
5633
5634 /* If we couldn't do this for some reason, return the original
5635 expression. */
5636 if (GET_CODE (tem) == CLOBBER)
5637 return x;
5638
5639 return tem;
5640}
5641\f
/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGS).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */

static rtx
expand_field_assignment (x)
     rtx x;
{
  rtx inner;
  rtx pos;			/* Always counts from low bit.  */
  int len;
  rtx mask;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      /* Case 1: (set (strict_low_part (subreg ...)) ...).  The field is
	 the low LEN bits of the word containing the subreg.  */
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
	{
	  int byte_offset = SUBREG_BYTE (XEXP (SET_DEST (x), 0));

	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
	  pos = GEN_INT (BITS_PER_WORD * (byte_offset / UNITS_PER_WORD));
	}
      /* Case 2: (set (zero_extract ... LEN POS) ...) with constant LEN.  */
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
	{
	  inner = XEXP (SET_DEST (x), 0);
	  len = INTVAL (XEXP (SET_DEST (x), 1));
	  pos = XEXP (SET_DEST (x), 2);

	  /* If the position is constant and spans the width of INNER,
	     surround INNER with a USE to indicate this.  */
	  if (GET_CODE (pos) == CONST_INT
	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);

	  /* Convert a big-endian bit position into the canonical
	     counted-from-LSB form that POS uses everywhere below.  */
	  if (BITS_BIG_ENDIAN)
	    {
	      if (GET_CODE (pos) == CONST_INT)
		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
			       - INTVAL (pos));
	      else if (GET_CODE (pos) == MINUS
		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
		       && (INTVAL (XEXP (pos, 1))
			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
		/* If position is ADJUST - X, new position is X.  */
		pos = XEXP (pos, 0);
	      else
		pos = gen_binary (MINUS, GET_MODE (pos),
				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
					   - len),
				  pos);
	    }
	}

      /* A SUBREG between two modes that occupy the same numbers of words
	 can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
	       /* We need SUBREGs to compute nonzero_bits properly.  */
	       && nonzero_sign_valid
	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	{
	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
			   gen_lowpart_for_combine
			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
			    SET_SRC (x)));
	  continue;
	}
      else
	break;

      /* Strip lowpart SUBREGs so we operate on the underlying object.  */
      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
	inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Don't attempt bitwise arithmetic on non-integral modes.  */
      if (! INTEGRAL_MODE_P (compute_mode))
	{
	  enum machine_mode imode;

	  /* Something is probably seriously wrong if this matches.  */
	  if (! FLOAT_MODE_P (compute_mode))
	    break;

	  /* Try to find an integral mode to pun with.  */
	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
	  if (imode == BLKmode)
	    break;

	  compute_mode = imode;
	  inner = gen_lowpart_for_combine (imode, inner);
	}

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len < HOST_BITS_PER_WIDE_INT)
	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      else
	break;

      /* Now compute the equivalent expression.  Make a copy of INNER
	 for the SET_DEST in case it is a MEM into which we will substitute;
	 we don't want shared RTL in that case.

	 The result is:  dest = (inner & ~(mask << pos))
			      | ((lowpart (src) & mask) << pos).  */
      x = gen_rtx_SET
	(VOIDmode, copy_rtx (inner),
	 gen_binary (IOR, compute_mode,
		     gen_binary (AND, compute_mode,
				 simplify_gen_unary (NOT, compute_mode,
						     gen_binary (ASHIFT,
								 compute_mode,
								 mask, pos),
						     compute_mode),
				 inner),
		     gen_binary (ASHIFT, compute_mode,
				 gen_binary (AND, compute_mode,
					     gen_lowpart_for_combine
					     (compute_mode, SET_SRC (x)),
					     mask),
				 pos)));
    }

  return x;
}
5775\f
/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   INNER may be a USE.  This will occur when we started with a bitfield
   that went outside the boundary of the object in memory, which is
   allowed on most machines.  To isolate this case, we produce a USE
   whose mode is wide enough and surround the MEM with it.  The only
   code that understands the USE is this routine.  If it is not removed,
   it will cause the resulting insn not to match.

   UNSIGNEDP is non-zero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is non-zero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If non-zero,
   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is non-zero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).

   The result is an RTX for the extraction or NULL_RTX if the target
   can't handle it.  */

static rtx
make_extraction (mode, inner, pos, pos_rtx, len,
		 unsignedp, in_dest, in_compare)
     enum machine_mode mode;
     rtx inner;
     HOST_WIDE_INT pos;
     rtx pos_rtx;
     unsigned HOST_WIDE_INT len;
     int unsignedp;
     int in_dest, in_compare;
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  /* Modes we would like INNER to have, depending on whether it is a
     memory reference or a register; refined below from the target's
     insv/extv/extzv patterns when those exist.  */
  enum machine_mode wanted_inner_mode = byte_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  int spans_byte = 0;
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;
  HOST_WIDE_INT orig_pos;

  /* Get some information about INNER and get the innermost object.  */
  if (GET_CODE (inner) == USE)
    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
    /* We don't need to adjust the position because we set up the USE
       to pretend that it was a full-word object.  */
    spans_byte = 1, inner = XEXP (inner, 0);
  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
	 consider just the QI as the memory to extract from.
	 The subreg adds or removes high bits; its mode is
	 irrelevant to the meaning of this extraction,
	 since POS and LEN count from the lsb.  */
      if (GET_CODE (SUBREG_REG (inner)) == MEM)
	is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }

  inner_mode = GET_MODE (inner);

  /* Canonicalize a constant position into POS, leaving POS_RTX only for
     a genuinely variable position.  */
  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode. For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  However,
     we cannot directly access the MEM if we have a USE and the underlying
     MEM is not TMODE.  This combination means that MEM was being used in a
     context where bits outside its mode were being referenced; that is only
     valid in bit-field insns.  */

  if (tmode != BLKmode
      && ! (spans_byte && inner_mode != tmode)
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
	   && GET_CODE (inner) != MEM
	   && (! in_dest
	       || (GET_CODE (inner) == REG
		   && (movstrict_optab->handlers[(int) tmode].insn_code
		       != CODE_FOR_nothing))))
	  || (GET_CODE (inner) == MEM && pos_rtx == 0
	      && (pos
		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
		     : BITS_PER_UNIT)) == 0
	      /* We can't do this if we are widening INNER_MODE (it
		 may not be aligned, for one thing).  */
	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
	      && (inner_mode == tmode
		  || (! mode_dependent_address_p (XEXP (inner, 0))
		      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
	 field.  If the original and current mode are the same, we need not
	 adjust the offset.  Otherwise, we do if bytes big endian.

	 If INNER is not a MEM, get a piece consisting of just the field
	 of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (GET_CODE (inner) == MEM)
	{
	  HOST_WIDE_INT offset;

	  /* POS counts from lsb, but make OFFSET count in memory order.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
	  else
	    offset = pos / BITS_PER_UNIT;

	  new = adjust_address_nv (inner, tmode, offset);
	}
      else if (GET_CODE (inner) == REG)
	{
	  /* We can't call gen_lowpart_for_combine here since we always want
	     a SUBREG and it would sometimes return a new hard register.  */
	  if (tmode != inner_mode)
	    {
	      HOST_WIDE_INT final_word = pos / BITS_PER_WORD;

	      /* SUBREG_WORD counts in memory order on big-endian word
		 machines, so flip the word index.  */
	      if (WORDS_BIG_ENDIAN
		  && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
		final_word = ((GET_MODE_SIZE (inner_mode)
			       - GET_MODE_SIZE (tmode))
			      / UNITS_PER_WORD) - final_word;

	      final_word *= UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN &&
		  GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
		final_word += (GET_MODE_SIZE (inner_mode)
			       - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;

	      new = gen_rtx_SUBREG (tmode, inner, final_word);
	    }
	  else
	    new = inner;
	}
      else
	new = force_to_mode (inner, tmode,
			     len >= HOST_BITS_PER_WIDE_INT
			     ? ~(unsigned HOST_WIDE_INT) 0
			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
			     NULL_RTX, 0);

      /* If this extraction is going into the destination of a SET,
	 make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
	return (GET_CODE (new) == MEM ? new
		: (GET_CODE (new) != SUBREG
		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));

      if (mode == tmode)
	return new;

      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert the extraction to the cheaper of
	 sign and zero extension, that are equivalent in these cases.  */
      if (flag_expensive_optimizations
	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
	      && ((nonzero_bits (new, tmode)
		   & ~(((unsigned HOST_WIDE_INT)
			GET_MODE_MASK (tmode))
		       >> 1))
		  == 0)))
	{
	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);

	  /* Prefer ZERO_EXTENSION, since it gives more information to
	     backends.  */
	  if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
	    return temp;
	  return temp1;
	}

      /* Otherwise, sign- or zero-extend unless we already are in the
	 proper mode.  */

      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
			     mode, new));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && ! spans_byte && unsignedp)
    return 0;

  /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
     we would be spanning bytes or if the position is not a constant and the
     length is not 1.  In all other cases, we would only be going outside
     our object in cases when an original shift would have been
     undefined.  */
  if (! spans_byte && GET_CODE (inner) == MEM
      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
	  || (pos_rtx != 0 && len != 1)))
    return 0;

  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  Each is taken from the target's insert or
     extract pattern if it exists, falling back to word_mode.  */
#ifdef HAVE_insv
  if (in_dest)
    {
      wanted_inner_reg_mode
	= insn_data[(int) CODE_FOR_insv].operand[0].mode;
      if (wanted_inner_reg_mode == VOIDmode)
	wanted_inner_reg_mode = word_mode;

      pos_mode = insn_data[(int) CODE_FOR_insv].operand[2].mode;
      if (pos_mode == VOIDmode)
	pos_mode = word_mode;

      extraction_mode = insn_data[(int) CODE_FOR_insv].operand[3].mode;
      if (extraction_mode == VOIDmode)
	extraction_mode = word_mode;
    }
#endif

#ifdef HAVE_extzv
  if (! in_dest && unsignedp)
    {
      wanted_inner_reg_mode
	= insn_data[(int) CODE_FOR_extzv].operand[1].mode;
      if (wanted_inner_reg_mode == VOIDmode)
	wanted_inner_reg_mode = word_mode;

      pos_mode = insn_data[(int) CODE_FOR_extzv].operand[3].mode;
      if (pos_mode == VOIDmode)
	pos_mode = word_mode;

      extraction_mode = insn_data[(int) CODE_FOR_extzv].operand[0].mode;
      if (extraction_mode == VOIDmode)
	extraction_mode = word_mode;
    }
#endif

#ifdef HAVE_extv
  if (! in_dest && ! unsignedp)
    {
      wanted_inner_reg_mode
	= insn_data[(int) CODE_FOR_extv].operand[1].mode;
      if (wanted_inner_reg_mode == VOIDmode)
	wanted_inner_reg_mode = word_mode;

      pos_mode = insn_data[(int) CODE_FOR_extv].operand[3].mode;
      if (pos_mode == VOIDmode)
	pos_mode = word_mode;

      extraction_mode = insn_data[(int) CODE_FOR_extv].operand[0].mode;
      if (extraction_mode == VOIDmode)
	extraction_mode = word_mode;
    }
#endif

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
     if we have to change the mode of memory and cannot, the desired mode is
     EXTRACTION_MODE.  */
  if (GET_CODE (inner) != MEM)
    wanted_inner_mode = wanted_inner_reg_mode;
  else if (inner_mode != wanted_inner_mode
	   && (mode_dependent_address_p (XEXP (inner, 0))
	       || MEM_VOLATILE_P (inner)))
    wanted_inner_mode = extraction_mode;

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
	 BITS_BIG_ENDIAN style.  If position is constant, compute new
	 position.  Otherwise, build subtraction.
	 Note that POS is relative to the mode of the original argument.
	 If it's a MEM we need to recompute POS relative to that.
	 However, if we're extracting from (or inserting into) a register,
	 we want to recompute POS relative to wanted_inner_mode.  */
      int width = (GET_CODE (inner) == MEM
		   ? GET_MODE_BITSIZE (is_mode)
		   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
	pos = width - len - pos;
      else
	pos_rtx
	  = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
    }

  /* If INNER has a wider mode, make it smaller.  If this is a constant
     extract, try to adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && ((GET_CODE (inner) == MEM
	   && (inner_mode == wanted_inner_mode
	       || (! mode_dependent_address_p (XEXP (inner, 0))
		   && ! MEM_VOLATILE_P (inner))))))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
	 endian in both bits and bytes or little endian in bits and bytes.
	 If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
	 adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
	  && ! spans_byte
	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* If this is a constant position, we can move to the desired byte.  */
      if (pos_rtx == 0)
	{
	  offset += pos / BITS_PER_UNIT;
	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
	}

      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
	  && ! spans_byte
	  && is_mode != wanted_inner_mode)
	offset = (GET_MODE_SIZE (is_mode)
		  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      if (offset != 0 || inner_mode != wanted_inner_mode)
	inner = adjust_address_nv (inner, wanted_inner_mode, offset);
    }

  /* If INNER is not memory, we can always get it into the proper mode.  If we
     are changing its mode, POS must be a constant and smaller than the size
     of the new mode.  */
  else if (GET_CODE (inner) != MEM)
    {
      if (GET_MODE (inner) != wanted_inner_mode
	  && (pos_rtx != 0
	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
	return 0;

      inner = force_to_mode (inner, wanted_inner_mode,
			     pos_rtx
			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
			     ? ~(unsigned HOST_WIDE_INT) 0
			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
				<< orig_pos),
			     NULL_RTX, 0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);

      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert extraction to cheaper one - either
	 SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
	 cases.  */
      if (flag_expensive_optimizations
	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
		   & ~(((unsigned HOST_WIDE_INT)
			GET_MODE_MASK (GET_MODE (pos_rtx)))
		       >> 1))
		  == 0)))
	{
	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);

	  /* Prefer ZERO_EXTENSION, since it gives more information to
	     backends.  */
	  if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
	    temp = temp1;
	}
      pos_rtx = temp;
    }
  else if (pos_rtx != 0
	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
			 extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart_for_combine (mode, new);

  return new;
}
6200\f
71923da7
RK
6201/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6202 with any other operations in X. Return X without that shift if so. */
6203
6204static rtx
6205extract_left_shift (x, count)
6206 rtx x;
6207 int count;
6208{
6209 enum rtx_code code = GET_CODE (x);
6210 enum machine_mode mode = GET_MODE (x);
6211 rtx tem;
6212
6213 switch (code)
6214 {
6215 case ASHIFT:
6216 /* This is the shift itself. If it is wide enough, we will return
6217 either the value being shifted if the shift count is equal to
6218 COUNT or a shift for the difference. */
6219 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6220 && INTVAL (XEXP (x, 1)) >= count)
6221 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6222 INTVAL (XEXP (x, 1)) - count);
6223 break;
6224
6225 case NEG: case NOT:
6226 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
f1c6ba8b 6227 return simplify_gen_unary (code, mode, tem, mode);
71923da7
RK
6228
6229 break;
6230
6231 case PLUS: case IOR: case XOR: case AND:
6232 /* If we can safely shift this constant and we find the inner shift,
6233 make a new operation. */
6234 if (GET_CODE (XEXP (x,1)) == CONST_INT
b729186a 6235 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
71923da7 6236 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
663522cb 6237 return gen_binary (code, mode, tem,
71923da7
RK
6238 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6239
6240 break;
663522cb 6241
e9a25f70
JL
6242 default:
6243 break;
71923da7
RK
6244 }
6245
6246 return 0;
6247}
6248\f
/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the Vax that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */

static rtx
make_compound_operation (x, in_code)
     rtx x;
     enum rtx_code in_code;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i;
  rtx new = 0;
  rtx tem;
  const char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
		  && XEXP (x, 1) == const0_rtx) ? COMPARE
	       : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     non-zero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
	 an address.  */
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (x, 0), next_code);
	  new = gen_rtx_MULT (mode, new,
			      GEN_INT ((HOST_WIDE_INT) 1
				       << INTVAL (XEXP (x, 1))));
	}
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
	 with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	break;

      /* If the constant is a power of two minus one and the first operand
	 is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
				 0, in_code == COMPARE);
	}

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (x, 0))
	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
					 next_code);
	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
				 0, in_code == COMPARE);
	}
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
		|| GET_CODE (XEXP (x, 0)) == IOR)
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  /* Apply the distributive law, and then try to make extractions.  */
	  new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
					     XEXP (x, 1)),
				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
					     XEXP (x, 1)));
	  new = make_compound_operation (new, in_code);
	}

      /* If we have (and (rotate X C) M) and C is larger than the number
	 of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new,
				 (GET_MODE_BITSIZE (mode)
				  - INTVAL (XEXP (XEXP (x, 0), 1))),
				 NULL_RTX, i, 1, 0, in_code == COMPARE);
	}

      /* On machines without logical shifts, if the operand of the AND is
	 a logical shift and our mask turns off all the propagated sign
	 bits, we can replace the logical shift with an arithmetic shift.  */
      else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	       && (lshr_optab->handlers[(int) mode].insn_code
		   == CODE_FOR_nothing)
	       && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	       && mode_width <= HOST_BITS_PER_WIDE_INT)
	{
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
	    /* Note: SUBST records the substitution for the undo log.  */
	    SUBST (XEXP (x, 0),
		   gen_rtx_ASHIFTRT (mode,
				     make_compound_operation
				     (XEXP (XEXP (x, 0), 0), next_code),
				     XEXP (XEXP (x, 0), 1)));
	}

      /* If the constant is one less than a power of two, this might be
	 representable by an extraction even if no shift is present.
	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
	 we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
	 convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
	 arithmetic shift.  */
      if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
	  && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
	{
	  new = gen_rtx_ASHIFTRT (mode,
				  make_compound_operation (XEXP (x, 0),
							   next_code),
				  XEXP (x, 1));
	  break;
	}

      /* ... fall through to the ASHIFTRT handling ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
	 this is a SIGN_EXTRACT.  */
      if (GET_CODE (rhs) == CONST_INT
	  && GET_CODE (lhs) == ASHIFT
	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
	{
	  new = make_compound_operation (XEXP (lhs, 0), next_code);
	  new = make_extraction (mode, new,
				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
				 NULL_RTX, mode_width - INTVAL (rhs),
				 code == LSHIFTRT, 0, in_code == COMPARE);
	  break;
	}

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
	 also do this for some cases of SIGN_EXTRACT, but it doesn't
	 seem worth the effort; the case checked for occurs on Alpha.  */

      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
	  && ! (GET_CODE (lhs) == SUBREG
		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
	  && GET_CODE (rhs) == CONST_INT
	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
	new = make_extraction (mode, make_compound_operation (new, next_code),
			       0, NULL_RTX, mode_width - INTVAL (rhs),
			       code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
	 narrowing the object and it has a different RTL code from
	 what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);
      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
	  && subreg_lowpart_p (x))
	{
	  rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
				     NULL_RTX, 0);

	  /* If we have something other than a SUBREG, we might have
	     done an expansion, so rerun ourselves.  */
	  if (GET_CODE (newer) != SUBREG)
	    newer = make_compound_operation (newer, in_code);

	  return newer;
	}

      /* If this is a paradoxical subreg, and the new code is a sign or
	 zero extension, omit the subreg and widen the extension.  If it
	 is a regular subreg, we can still get rid of the subreg by not
	 widening so much, or in fact removing the extension entirely.  */
      if ((GET_CODE (tem) == SIGN_EXTEND
	   || GET_CODE (tem) == ZERO_EXTEND)
	  && subreg_lowpart_p (x))
	{
	  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
	      || (GET_MODE_SIZE (mode) >
		  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
	    tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
	  else
	    tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
	  return tem;
	}
      break;

    default:
      break;
    }

  if (new)
    {
      x = gen_lowpart_for_combine (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = make_compound_operation (XEXP (x, i), next_code);
	SUBST (XEXP (x, i), new);
      }

  return x;
}
6526\f
6527/* Given M see if it is a value that would select a field of bits
663522cb
KH
6528 within an item, but not the entire word. Return -1 if not.
6529 Otherwise, return the starting position of the field, where 0 is the
6530 low-order bit.
230d793d
RS
6531
6532 *PLEN is set to the length of the field. */
6533
6534static int
6535get_pos_from_mask (m, plen)
5f4f0e22 6536 unsigned HOST_WIDE_INT m;
770ae6cc 6537 unsigned HOST_WIDE_INT *plen;
230d793d
RS
6538{
6539 /* Get the bit number of the first 1 bit from the right, -1 if none. */
663522cb 6540 int pos = exact_log2 (m & -m);
d3bc8938 6541 int len;
230d793d
RS
6542
6543 if (pos < 0)
6544 return -1;
6545
6546 /* Now shift off the low-order zero bits and see if we have a power of
6547 two minus 1. */
d3bc8938 6548 len = exact_log2 ((m >> pos) + 1);
230d793d 6549
d3bc8938 6550 if (len <= 0)
230d793d
RS
6551 return -1;
6552
d3bc8938 6553 *plen = len;
230d793d
RS
6554 return pos;
6555}
6556\f
6139ff20
RK
/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   Also, if REG is non-zero and X is a register equal in value to REG,
   replace X with REG.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.

   This function recurses into the operands of X, propagating a mask that
   is refined (or widened to FULLER_MASK) according to each operator's
   carry/shift behavior.  */

static rtx
force_to_mode (x, mode, mask, reg, just_select)
     rtx x;
     enum machine_mode mode;
     unsigned HOST_WIDE_INT mask;
     rtx reg;
     int just_select;
{
  enum rtx_code code = GET_CODE (x);
  /* Complementing operators (XOR, NOT, NEG) can turn "known zero" bits
     back on, so below such an operator we must stop using the
     nonzero-bits shortcut: force JUST_SELECT for the recursion.  */
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart_for_combine.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  The optab table tells us whether the target has an insn
     for CODE in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
	      && code_to_optab[(int) code] != 0
	      && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
		  != CODE_FOR_nothing))
	     ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
  if (op_mode)
    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
		   ? GET_MODE_MASK (op_mode)
		   : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
		      - 1));
  else
    fuller_mask = ~(HOST_WIDE_INT) 0;

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (! just_select && (nonzero & mask) == 0)
    return const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (GET_CODE (x) == CONST_INT)
    {
      HOST_WIDE_INT cval = INTVAL (x) & mask;
      int width = GET_MODE_BITSIZE (mode);

      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
	 number, sign extend it.  */
      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
	  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
	cval |= (HOST_WIDE_INT) -1 << width;

      return GEN_INT (cval);
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
    return gen_lowpart_for_combine (mode, x);

  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
     MASK are already known to be zero in X, we need not do anything.  */
  if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
    return x;

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
	 generating something that won't match.  */
      return x;

    case USE:
      /* X is a (use (mem ..)) that was made from a bit-field extraction that
	 spanned the boundary of the MEM.  If we are now masking so it is
	 within that boundary, we don't need the USE any more.  */
      if (! BITS_BIG_ENDIAN
	  && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Rewrite the extension/extraction in terms of shifts and AND,
	 then retry if that produced a different top-level code.  */
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
	return force_to_mode (x, mode, mask, reg, next_select);
      break;

    case REG:
      /* Substitute REG for X when they are known to hold the same value
	 (in either direction of the equivalence).  */
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
		       || rtx_equal_p (reg, get_last_value (x))))
	x = reg;
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
	  /* We can ignore the effect of this SUBREG if it narrows the mode or
	     if the constant masks to zero all the bits the mode doesn't
	     have.  */
	  && ((GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	      || (0 == (mask
			& GET_MODE_MASK (GET_MODE (x))
			& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
	 whose constant is the AND of that constant with MASK.  If it
	 remains an AND of MASK, delete it since it is redundant.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
				      mask & INTVAL (XEXP (x, 1)));

	  /* If X is still an AND, see if it is an AND with a mask that
	     is just some low-order bits.  If so, and it is MASK, we don't
	     need it.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask)
	    x = XEXP (x, 0);

	  /* If it remains an AND, try making another AND with the bits
	     in the mode mask that aren't in MASK turned on.  If the
	     constant in the AND is wide enough, this might make a
	     cheaper constant.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_MODE_MASK (GET_MODE (x)) != mask
	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
	    {
	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
				    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
	      int width = GET_MODE_BITSIZE (GET_MODE (x));
	      rtx y;

	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
		 number, sign extend it.  */
	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
		cval |= (HOST_WIDE_INT) -1 << width;

	      /* Keep the wider constant only if it is actually cheaper.  */
	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
		x = y;
	    }

	  break;
	}

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
	 low-order bits (as in an alignment operation) and FOO is already
	 aligned to that boundary, mask C1 to that boundary as well.
	 This may eliminate that PLUS and, later, the AND.  */

      {
	unsigned int width = GET_MODE_BITSIZE (mode);
	unsigned HOST_WIDE_INT smask = mask;

	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
	   number, sign extend it.  */

	if (width < HOST_BITS_PER_WIDE_INT
	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
	  smask |= (HOST_WIDE_INT) -1 << width;

	/* exact_log2 (-smask) >= 0 means SMASK clears only a group of
	   low-order bits, i.e. it is an alignment mask.  */
	if (GET_CODE (XEXP (x, 1)) == CONST_INT
	    && exact_log2 (- smask) >= 0)
	  {
#ifdef STACK_BIAS
	    if (STACK_BIAS
		&& (XEXP (x, 0) == stack_pointer_rtx
		    || XEXP (x, 0) == frame_pointer_rtx))
	      {
		int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
		unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);

		sp_mask &= ~(sp_alignment - 1);
		if ((sp_mask & ~smask) == 0
		    && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~smask) != 0)
		  return force_to_mode (plus_constant (XEXP (x, 0),
						       ((INTVAL (XEXP (x, 1)) -
							 STACK_BIAS) & smask)
						       + STACK_BIAS),
					mode, smask, reg, next_select);
	      }
#endif
	    if ((nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
		&& (INTVAL (XEXP (x, 1)) & ~smask) != 0)
	      return force_to_mode (plus_constant (XEXP (x, 0),
						   (INTVAL (XEXP (x, 1))
						    & smask)),
				    mode, smask, reg, next_select);
	  }
      }

      /* ... fall through ...  */

    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
	 most significant bit in MASK since carries from those bits will
	 affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case MINUS:
      /* If X is (minus C Y) where C's least set bit is larger than any bit
	 in the mask, then we may replace with (neg Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
					 & -INTVAL (XEXP (x, 0))))
	      > mask))
	{
	  x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
				  GET_MODE (x));
	  return force_to_mode (x, mode, mask, reg, next_select);
	}

      /* Similarly, if C contains every bit in the mask, then we may
	 replace with (not Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) mask)
	      == INTVAL (XEXP (x, 0))))
	{
	  x = simplify_gen_unary (NOT, GET_MODE (x),
				  XEXP (x, 1), GET_MODE (x));
	  return force_to_mode (x, mode, mask, reg, next_select);
	}

      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
	 operation which may be a bitfield extraction.  Ensure that the
	 constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
	       + floor_log2 (INTVAL (XEXP (x, 1))))
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && (INTVAL (XEXP (x, 1))
	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
	{
	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
			  << INTVAL (XEXP (XEXP (x, 0), 1)));
	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
			     XEXP (XEXP (x, 0), 0), temp);
	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
			  XEXP (XEXP (x, 0), 1));
	  return force_to_mode (x, mode, mask, reg, next_select);
	}

    binop:
      /* For most binary operations, just propagate into the operation and
	 change the mode if we have an operation of that mode.  */

      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    reg, next_select));
      op1 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 1), mode, mask,
						    reg, next_select));

      /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
	 MASK since OP1 might have been sign-extended but we never want
	 to turn on extra bits, since combine might have previously relied
	 on them being off.  */
      if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
	  && (INTVAL (op1) & mask) != 0)
	op1 = GEN_INT (INTVAL (op1) & mask);

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
	 However, we cannot do anything with shifts where we cannot
	 guarantee that the counts are smaller than the size of the mode
	 because such a count will have a different meaning in a
	 wider mode.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
	     && INTVAL (XEXP (x, 1)) >= 0
	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
	break;

      /* If the shift count is a constant and we can do arithmetic in
	 the mode of the shift, refine which bits we need.  Otherwise, use the
	 conservative form of the mask.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	mask >>= INTVAL (XEXP (x, 1));
      else
	mask = fuller_mask;

      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), op_mode,
						    mask, reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
	 this shift constant is valid for the host, and we can do arithmetic
	 in OP_MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  rtx inner = XEXP (x, 0);
	  unsigned HOST_WIDE_INT inner_mask;

	  /* Select the mask of the bits we need for the shift operand.  */
	  inner_mask = mask << INTVAL (XEXP (x, 1));

	  /* We can only change the mode of the shift if we can do arithmetic
	     in the mode of the shift and INNER_MASK is no wider than the
	     width of OP_MODE.  */
	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
	      || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
	    op_mode = GET_MODE (x);

	  inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);

	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
	}

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
	 shift and AND produces only copies of the sign bit (C2 is one less
	 than a power of two), we can do this with just a shift.  */

      if (GET_CODE (x) == LSHIFTRT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  /* The shift puts one of the sign bit copies in the least significant
	     bit.  */
	  && ((INTVAL (XEXP (x, 1))
	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
	      >= GET_MODE_BITSIZE (GET_MODE (x)))
	  && exact_log2 (mask + 1) >= 0
	  /* Number of bits left after the shift must be more than the mask
	     needs.  */
	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
	      <= GET_MODE_BITSIZE (GET_MODE (x)))
	  /* Must be more sign bit copies than the mask needs.  */
	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
	      >= exact_log2 (mask + 1)))
	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
				 - exact_log2 (mask + 1)));

      goto shiftrt;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
	 all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (mask == ((unsigned HOST_WIDE_INT) 1
		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      /* If this is a shift by a constant, get a mask that contains those bits
	 that are not copies of the sign bit.  We then have two cases:  If
	 MASK only includes those bits, this can be a logical shift, which may
	 allow simplifications.  If MASK is a single-bit field not within
	 those bits, we are requesting a copy of the sign bit and hence can
	 shift the sign bit to the appropriate location.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  int i = -1;

	  /* If the considered data is wider than HOST_WIDE_INT, we can't
	     represent a mask for all its bits in a single scalar.
	     But we only care about the lower bits, so calculate these.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
	    {
	      nonzero = ~(HOST_WIDE_INT) 0;

	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		 is the number of bits a full-width mask would have set.
		 We need only shift if these are fewer than nonzero can
		 hold.  If not, we must keep all bits set in nonzero.  */

	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		  < HOST_BITS_PER_WIDE_INT)
		nonzero >>= INTVAL (XEXP (x, 1))
			    + HOST_BITS_PER_WIDE_INT
			    - GET_MODE_BITSIZE (GET_MODE (x)) ;
	    }
	  else
	    {
	      nonzero = GET_MODE_MASK (GET_MODE (x));
	      nonzero >>= INTVAL (XEXP (x, 1));
	    }

	  /* I is set to the position of MASK's single bit, if any; used
	     below to move the sign bit to that position.  */
	  if ((mask & ~nonzero) == 0
	      || (i = exact_log2 (mask)) >= 0)
	    {
	      x = simplify_shift_const
		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
		 i < 0 ? INTVAL (XEXP (x, 1))
		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

	      if (GET_CODE (x) != ASHIFTRT)
		return force_to_mode (x, mode, mask, reg, next_select);
	    }
	}

      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
	 even if the shift count isn't a constant.  */
      if (mask == 1)
	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));

    shiftrt:

      /* If this is a zero- or sign-extension operation that just affects bits
	 we don't care about, remove it.  Be sure the call above returned
	 something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && (INTVAL (XEXP (x, 1))
	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
	  && GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
			      reg, next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
	 in the mode of X, compute where the bits we care about are.
	 Otherwise, we can't do anything.  Don't change the mode of
	 the shift or propagate MODE into the shift, though.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  /* Rotate MASK the opposite way to find which source bits
	     land in the positions we care about.  */
	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
					    GET_MODE (x), GEN_INT (mask),
					    XEXP (x, 1));
	  if (temp && GET_CODE(temp) == CONST_INT)
	    SUBST (XEXP (x, 0),
		   force_to_mode (XEXP (x, 0), GET_MODE (x),
				  INTVAL (temp), reg, next_select));
	}
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
	 won't change the low-order bit.  */
      if (mask == 1)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);

      /* We need any bits less significant than the most significant bit in
	 MASK since carries from those bits will affect the bits we are
	 interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
	 same as the XOR case above.  Ensure that the constant we form is not
	 wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));

	  return force_to_mode (x, mode, mask, reg, next_select);
	}

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
	 use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    reg, next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = simplify_gen_unary (code, op_mode, op0, op_mode);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
	 which is equal to STORE_FLAG_VALUE.  */
      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
	  && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
	 written in a narrower mode.  We play it safe and do not do so.
	 Only the two arms are narrowed, in place via SUBST.  */

      SUBST (XEXP (x, 1),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 1), mode,
						     mask, reg, next_select)));
      SUBST (XEXP (x, 2),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 2), mode,
						     mask, reg,next_select)));
      break;

    default:
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_for_combine (mode, x);
}
7148\f
abe6e52f
RK
/* Return nonzero if X is an expression that has one of two values depending on
   whether some other value is zero or nonzero.  In that case, we return the
   value that is being tested, *PTRUE is set to the value if the rtx being
   returned has a nonzero value, and *PFALSE is set to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X.

   This recurses through unary and binary operations and SUBREGs,
   distributing the operation over the two alternatives of each operand.  */

static rtx
if_then_else_cond (x, ptrue, pfalse)
     rtx x;
     rtx *ptrue, *pfalse;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;

  /* If we are comparing a value against zero, we are done.  */
  if ((code == NE || code == EQ)
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
    {
      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
      return XEXP (x, 0);
    }

  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values.  */
  else if (GET_RTX_CLASS (code) == '1'
	   && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
      *pfalse = simplify_gen_unary (code, mode, false0,
				    GET_MODE (XEXP (x, 0)));
      return cond0;
    }

  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations.  */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values.  */
  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
	   || GET_RTX_CLASS (code) == '<')
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
	{
	  /* If if_then_else_cond returned zero, then true/false are the
	     same rtl.  We must copy one of them to prevent invalid rtl
	     sharing.  */
	  if (cond0 == 0)
	    true0 = copy_rtx (true0);
	  else if (cond1 == 0)
	    true1 = copy_rtx (true1);

	  *ptrue = gen_binary (code, mode, true0, true1);
	  *pfalse = gen_binary (code, mode, false0, false1);
	  return cond0 ? cond0 : cond1;
	}

      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
	 operands is zero when the other is non-zero, and vice-versa,
	 and STORE_FLAG_VALUE is 1 or -1.  Each operand is then
	 (mult COMPARISON VALUE) with complementary comparisons.  */

      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
	      || code == UMAX)
	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
	{
	  rtx op0 = XEXP (XEXP (x, 0), 1);
	  rtx op1 = XEXP (XEXP (x, 1), 1);

	  cond0 = XEXP (XEXP (x, 0), 0);
	  cond1 = XEXP (XEXP (x, 1), 0);

	  /* The two conditions must be each other's reverse, either
	     directly or with operands swapped.  */
	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
		  || ((swap_condition (GET_CODE (cond0))
		       == combine_reversed_comparison_code (cond1))
		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
	      && ! side_effects_p (x))
	    {
	      *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
	      *pfalse = gen_binary (MULT, mode,
				    (code == MINUS
				     ? simplify_gen_unary (NEG, mode, op1,
							   mode)
				     : op1),
				    const_true_rtx);
	      return cond0;
	    }
	}

      /* Similarly for MULT, AND and UMIN, except that for these the result
	 is always zero.  */
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	  && (code == MULT || code == AND || code == UMIN)
	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
	{
	  cond0 = XEXP (XEXP (x, 0), 0);
	  cond1 = XEXP (XEXP (x, 1), 0);

	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
		  || ((swap_condition (GET_CODE (cond0))
		       == combine_reversed_comparison_code (cond1))
		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
	      && ! side_effects_p (x))
	    {
	      *ptrue = *pfalse = const0_rtx;
	      return cond0;
	    }
	}
    }

  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
	 canonicalize it if it is NE or EQ.  */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
	return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
	{
	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
	  return XEXP (cond0, 0);
	}
      else
	return cond0;
    }

  /* If X is a SUBREG, we can narrow both the true and false values
     if the inner expression, if there is a condition.  */
  else if (code == SUBREG
	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
					       &true0, &false0)))
    {
      *ptrue = simplify_gen_subreg (mode, true0,
				    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
      *pfalse = simplify_gen_subreg (mode, false0,
				     GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));

      return cond0;
    }

  /* If X is a constant, this isn't special and will cause confusions
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
  else if (CONSTANT_P (x)
	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
    ;

  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
     will be least confusing to the rest of the compiler.  */
  else if (mode == BImode)
    {
      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
      return x;
    }

  /* If X is known to be either 0 or -1, those are the true and
     false values when testing X.  */
  else if (x == constm1_rtx || x == const0_rtx
	   || (mode != VOIDmode
	       && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
    {
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
      return x;
    }

  /* Likewise for 0 or a single bit.  */
  else if (mode != VOIDmode
	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	   && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
    {
      *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
      return x;
    }

  /* Otherwise fail; show no condition with true and false values the same.  */
  *ptrue = *pfalse = x;
  return 0;
}
7347\f
1a26b032
RK
7348/* Return the value of expression X given the fact that condition COND
7349 is known to be true when applied to REG as its first operand and VAL
7350 as its second. X is known to not be shared and so can be modified in
7351 place.
7352
7353 We only handle the simplest cases, and specifically those cases that
7354 arise with IF_THEN_ELSE expressions. */
7355
7356static rtx
7357known_cond (x, cond, reg, val)
7358 rtx x;
7359 enum rtx_code cond;
7360 rtx reg, val;
7361{
7362 enum rtx_code code = GET_CODE (x);
f24ad0e4 7363 rtx temp;
6f7d635c 7364 const char *fmt;
1a26b032
RK
7365 int i, j;
7366
7367 if (side_effects_p (x))
7368 return x;
7369
69bc0a1f
JH
7370 if (cond == EQ && rtx_equal_p (x, reg) && !FLOAT_MODE_P (cond))
7371 return val;
7372 if (cond == UNEQ && rtx_equal_p (x, reg))
1a26b032
RK
7373 return val;
7374
7375 /* If X is (abs REG) and we know something about REG's relationship
7376 with zero, we may be able to simplify this. */
7377
7378 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7379 switch (cond)
7380 {
7381 case GE: case GT: case EQ:
7382 return XEXP (x, 0);
7383 case LT: case LE:
f1c6ba8b
RK
7384 return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7385 XEXP (x, 0),
7386 GET_MODE (XEXP (x, 0)));
e9a25f70
JL
7387 default:
7388 break;
1a26b032
RK
7389 }
7390
7391 /* The only other cases we handle are MIN, MAX, and comparisons if the
7392 operands are the same as REG and VAL. */
7393
7394 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7395 {
7396 if (rtx_equal_p (XEXP (x, 0), val))
7397 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7398
7399 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7400 {
7401 if (GET_RTX_CLASS (code) == '<')
1eb8759b
RH
7402 {
7403 if (comparison_dominates_p (cond, code))
7404 return const_true_rtx;
1a26b032 7405
9a915772 7406 code = combine_reversed_comparison_code (x);
1eb8759b
RH
7407 if (code != UNKNOWN
7408 && comparison_dominates_p (cond, code))
7409 return const0_rtx;
7410 else
7411 return x;
7412 }
1a26b032
RK
7413 else if (code == SMAX || code == SMIN
7414 || code == UMIN || code == UMAX)
7415 {
7416 int unsignedp = (code == UMIN || code == UMAX);
7417
ac4cdf40
JE
7418 /* Do not reverse the condition when it is NE or EQ.
7419 This is because we cannot conclude anything about
7420 the value of 'SMAX (x, y)' when x is not equal to y,
7421 but we can when x equals y. */
7422 if ((code == SMAX || code == UMAX)
7423 && ! (cond == EQ || cond == NE))
1a26b032
RK
7424 cond = reverse_condition (cond);
7425
7426 switch (cond)
7427 {
7428 case GE: case GT:
7429 return unsignedp ? x : XEXP (x, 1);
7430 case LE: case LT:
7431 return unsignedp ? x : XEXP (x, 0);
7432 case GEU: case GTU:
7433 return unsignedp ? XEXP (x, 1) : x;
7434 case LEU: case LTU:
7435 return unsignedp ? XEXP (x, 0) : x;
e9a25f70
JL
7436 default:
7437 break;
1a26b032
RK
7438 }
7439 }
7440 }
7441 }
7442
7443 fmt = GET_RTX_FORMAT (code);
7444 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7445 {
7446 if (fmt[i] == 'e')
7447 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7448 else if (fmt[i] == 'E')
7449 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7450 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7451 cond, reg, val));
7452 }
7453
7454 return x;
7455}
7456\f
e11fa86f
RK
7457/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7458 assignment as a field assignment. */
7459
7460static int
7461rtx_equal_for_field_assignment_p (x, y)
7462 rtx x;
7463 rtx y;
7464{
e11fa86f
RK
7465 if (x == y || rtx_equal_p (x, y))
7466 return 1;
7467
7468 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7469 return 0;
7470
7471 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7472 Note that all SUBREGs of MEM are paradoxical; otherwise they
7473 would have been rewritten. */
7474 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7475 && GET_CODE (SUBREG_REG (y)) == MEM
7476 && rtx_equal_p (SUBREG_REG (y),
7477 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7478 return 1;
7479
7480 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7481 && GET_CODE (SUBREG_REG (x)) == MEM
7482 && rtx_equal_p (SUBREG_REG (x),
7483 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7484 return 1;
7485
9ec36da5
JL
7486 /* We used to see if get_last_value of X and Y were the same but that's
7487 not correct. In one direction, we'll cause the assignment to have
7488 the wrong destination and in the case, we'll import a register into this
7489 insn that might have already have been dead. So fail if none of the
7490 above cases are true. */
7491 return 0;
e11fa86f
RK
7492}
7493\f
/* See if X, a SET operation, can be rewritten as a bit-field assignment.
   Return that assignment if so.

   We only handle the most common cases.  */

static rtx
make_field_assignment (x)
     rtx x;
{
  rtx dest = SET_DEST (x);
  rtx src = SET_SRC (x);
  rtx assign;			/* The (zero_extract ...) destination.  */
  rtx rhs, lhs;			/* Expanded operands of an IOR/XOR source.  */
  HOST_WIDE_INT c1;		/* Constant mask taken from an AND.  */
  HOST_WIDE_INT pos;		/* Bit position of the field.  */
  unsigned HOST_WIDE_INT len;	/* Width of the field in bits.  */
  rtx other;			/* The value being stored into the field.  */
  enum machine_mode mode;

  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
     a clear of a one-bit field.  We will have changed it to
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
     for a SUBREG.  */

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      /* Store 0 into the single bit at the rotate count.  */
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
				1, 1, 1, 0);
      if (assign != 0)
	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }

  /* Same as above but with the ROTATE hidden under a paradoxical SUBREG.
     NOTE(review): INTVAL is applied to XEXP (SUBREG_REG (...), 0) without
     first checking that it is a CONST_INT -- presumably the ROTATE's first
     operand is always a constant here; verify against how combine builds
     this form.  */
  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
	   && subreg_lowpart_p (XEXP (src, 0))
	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0,
				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
				1, 1, 1, 0);
      if (assign != 0)
	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }

  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
     one-bit field.  */
  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
	   && XEXP (XEXP (src, 0), 0) == const1_rtx
	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      /* Store 1 into the single bit at the shift count.  */
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
				1, 1, 1, 0);
      if (assign != 0)
	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
      return x;
    }

  /* The other case we handle is assignments into a constant-position
     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
     a mask that has all one bits except for a group of zero bits and
     OTHER is known to have zeros where C1 has ones, this is such an
     assignment.  Compute the position and length from C1.  Shift OTHER
     to the appropriate position, force it to the required mode, and
     make the extraction.  Check for the AND in both operands.  */

  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
    return x;

  rhs = expand_compound_operation (XEXP (src, 0));
  lhs = expand_compound_operation (XEXP (src, 1));

  /* Find which operand is (and DEST C1); the other one supplies the
     field value.  */
  if (GET_CODE (rhs) == AND
      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
  else if (GET_CODE (lhs) == AND
	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
  else
    return x;

  /* ~C1 must be a single contiguous group of ones (the field), the field
     must fit in DEST's mode (which must fit in a host word), and OTHER
     must be known zero outside the field.  */
  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
    return x;

  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
  if (assign == 0)
    return x;

  /* The mode to use for the source is the mode of the assignment, or of
     what is inside a possible STRICT_LOW_PART.  */
  mode = (GET_CODE (assign) == STRICT_LOW_PART
	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));

  /* Shift OTHER right POS places and make it the source, restricting it
     to the proper length and mode.  */

  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
					     GET_MODE (src), other, pos),
		       mode,
		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
		       ? ~(unsigned HOST_WIDE_INT) 0
		       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
		       dest, 0);

  return gen_rtx_SET (VOIDmode, assign, src);
}
7612\f
/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
   if so.  */

static rtx
apply_distributive_law (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  rtx lhs, rhs, other;
  rtx tem;
  enum rtx_code inner_code;

  /* Distributivity is not true for floating point.
     It can change the value.  So don't do it.
     -- rms and moshier@world.std.com.  */
  if (FLOAT_MODE_P (GET_MODE (x)))
    return x;

  /* The outer operation can only be one of the following:  */
  if (code != IOR && code != AND && code != XOR
      && code != PLUS && code != MINUS)
    return x;

  lhs = XEXP (x, 0), rhs = XEXP (x, 1);

  /* If either operand is a primitive we can't do anything, so get out
     fast.  */
  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
    return x;

  /* Both inner operations must have the same code for the law to apply.  */
  lhs = expand_compound_operation (lhs);
  rhs = expand_compound_operation (rhs);
  inner_code = GET_CODE (lhs);
  if (inner_code != GET_CODE (rhs))
    return x;

  /* See if the inner and outer operations distribute.  */
  switch (inner_code)
    {
    case LSHIFTRT:
    case ASHIFTRT:
    case AND:
    case IOR:
      /* These all distribute except over PLUS.  */
      if (code == PLUS || code == MINUS)
	return x;
      break;

    case MULT:
      /* Multiplication distributes only over addition/subtraction.  */
      if (code != PLUS && code != MINUS)
	return x;
      break;

    case ASHIFT:
      /* This is also a multiply, so it distributes over everything.  */
      break;

    case SUBREG:
      /* Non-paradoxical SUBREGs distribute over all operations, provided
	 the inner modes and byte offsets are the same, this is an extraction
	 of a low-order part, we don't convert an fp operation to int or
	 vice versa, and we would not be converting a single-word
	 operation into a multi-word operation.  The latter test is not
	 required, but it prevents generating unneeded multi-word operations.
	 Some of the previous tests are redundant given the latter test, but
	 are retained because they are required for correctness.

	 We produce the result slightly differently in this case.  */

      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
	  || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
	  || ! subreg_lowpart_p (lhs)
	  || (GET_MODE_CLASS (GET_MODE (lhs))
	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
	  || (GET_MODE_SIZE (GET_MODE (lhs))
	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
	return x;

      /* Do the outer operation in the inner (wider) mode and take the
	 lowpart of the result.  */
      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
			SUBREG_REG (lhs), SUBREG_REG (rhs));
      return gen_lowpart_for_combine (GET_MODE (x), tem);

    default:
      return x;
    }

  /* Set LHS and RHS to the inner operands (A and B in the example
     above) and set OTHER to the common operand (C in the example).
     There is only one way to do this unless the inner operation is
     commutative.  */
  if (GET_RTX_CLASS (inner_code) == 'c'
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
  else if (GET_RTX_CLASS (inner_code) == 'c'
	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
  else if (GET_RTX_CLASS (inner_code) == 'c'
	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
  else
    return x;

  /* Form the new inner operation, seeing if it simplifies first.  */
  tem = gen_binary (code, GET_MODE (x), lhs, rhs);

  /* There is one exception to the general way of distributing:
     (a | b) ^ (a | c) -> (~a) & (b ^ c), which is what the test below
     matches: the outer code is XOR, the inner code is IOR.  */
  if (code == XOR && inner_code == IOR)
    {
      inner_code = AND;
      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
    }

  /* We may be able to continue distributing the result, so call
     ourselves recursively on the inner operation before forming the
     outer operation, which we return.  */
  return gen_binary (inner_code, GET_MODE (x),
		     apply_distributive_law (tem), other);
}
7736\f
/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.

   Return an equivalent form, if different from X.  Otherwise, return X.  If
   X is zero, we are to always construct the equivalent form.  */

static rtx
simplify_and_const_int (x, mode, varop, constop)
     rtx x;
     enum machine_mode mode;
     rtx varop;
     unsigned HOST_WIDE_INT constop;
{
  unsigned HOST_WIDE_INT nonzero;
  int i;

  /* Simplify VAROP knowing that we will be only looking at some of the
     bits in it.  */
  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);

  /* If VAROP is a CLOBBER, we will fail so return it; if it is a
     CONST_INT, we are done.  */
  if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
    return varop;

  /* See what bits may be nonzero in VAROP.  Unlike the general case of
     a call to nonzero_bits, here we don't care about bits outside
     MODE.  */

  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
  nonzero = trunc_int_for_mode (nonzero, mode);

  /* Turn off all bits in the constant that are known to already be zero.
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
     which is tested below.  */

  constop &= nonzero;

  /* If we don't have any bits left, return zero.  */
  if (constop == 0)
    return const0_rtx;

  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
     a power of two, we can replace this with a ASHIFT.  */
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
      && (i = exact_log2 (constop)) >= 0)
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);

  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
     or XOR, then try to apply the distributive law.  This may eliminate
     operations if either branch can be simplified because of the AND.
     It may also make some cases more complex, but those cases probably
     won't match a pattern either with or without this.  */

  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
    return
      gen_lowpart_for_combine
	(mode,
	 apply_distributive_law
	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
					      XEXP (varop, 0), constop),
		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
					      XEXP (varop, 1), constop))));

  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
     if we already had one (just check for the simplest cases).  */
  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_MODE (XEXP (x, 0)) == mode
      && SUBREG_REG (XEXP (x, 0)) == varop)
    varop = XEXP (x, 0);
  else
    varop = gen_lowpart_for_combine (mode, varop);

  /* If we can't make the SUBREG, try to return what we were given.  */
  if (GET_CODE (varop) == CLOBBER)
    return x ? x : varop;

  /* If we are only masking insignificant bits, return VAROP.  */
  if (constop == nonzero)
    x = varop;

  /* Otherwise, return an AND.  See how much, if any, of X we can use.  */
  else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
    x = gen_binary (AND, mode, varop, GEN_INT (constop));

  else
    {
      /* Reuse the existing AND rtx, updating only the operands that
	 changed (SUBST records the change for possible undo).  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
	SUBST (XEXP (x, 1), GEN_INT (constop));

      SUBST (XEXP (x, 0), varop);
    }

  return x;
}
7834\f
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies.

   Since the macro expands to nothing, any call to num_sign_bit_copies ()
   below this point becomes a compile-time syntax error rather than a
   silent recursion.  */
#define num_sign_bit_copies()
7841
230d793d
RS
7842/* Given an expression, X, compute which bits in X can be non-zero.
7843 We don't care about bits outside of those defined in MODE.
7844
7845 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
7846 a shift, AND, or zero_extract, we can do better. */
7847
5f4f0e22 7848static unsigned HOST_WIDE_INT
951553af 7849nonzero_bits (x, mode)
230d793d
RS
7850 rtx x;
7851 enum machine_mode mode;
7852{
951553af
RK
7853 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7854 unsigned HOST_WIDE_INT inner_nz;
230d793d 7855 enum rtx_code code;
770ae6cc 7856 unsigned int mode_width = GET_MODE_BITSIZE (mode);
230d793d
RS
7857 rtx tem;
7858
1c75dfa4
RK
7859 /* For floating-point values, assume all bits are needed. */
7860 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7861 return nonzero;
7862
230d793d
RS
7863 /* If X is wider than MODE, use its mode instead. */
7864 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7865 {
7866 mode = GET_MODE (x);
951553af 7867 nonzero = GET_MODE_MASK (mode);
230d793d
RS
7868 mode_width = GET_MODE_BITSIZE (mode);
7869 }
7870
5f4f0e22 7871 if (mode_width > HOST_BITS_PER_WIDE_INT)
230d793d
RS
7872 /* Our only callers in this case look for single bit values. So
7873 just return the mode mask. Those tests will then be false. */
951553af 7874 return nonzero;
230d793d 7875
8baf60bb 7876#ifndef WORD_REGISTER_OPERATIONS
c6965c0f 7877 /* If MODE is wider than X, but both are a single word for both the host
663522cb 7878 and target machines, we can compute this from which bits of the
0840fd91
RK
7879 object might be nonzero in its own mode, taking into account the fact
7880 that on many CISC machines, accessing an object in a wider mode
7881 causes the high-order bits to become undefined. So they are
7882 not known to be zero. */
7883
7884 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7885 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7886 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
c6965c0f 7887 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
0840fd91
RK
7888 {
7889 nonzero &= nonzero_bits (x, GET_MODE (x));
663522cb 7890 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
0840fd91
RK
7891 return nonzero;
7892 }
7893#endif
7894
230d793d
RS
7895 code = GET_CODE (x);
7896 switch (code)
7897 {
7898 case REG:
320dd7a7
RK
7899#ifdef POINTERS_EXTEND_UNSIGNED
7900 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7901 all the bits above ptr_mode are known to be zero. */
7902 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3502dc9c 7903 && REG_POINTER (x))
320dd7a7
RK
7904 nonzero &= GET_MODE_MASK (ptr_mode);
7905#endif
7906
b0d71df9
RK
7907#ifdef STACK_BOUNDARY
7908 /* If this is the stack pointer, we may know something about its
7909 alignment. If PUSH_ROUNDING is defined, it is possible for the
230d793d
RS
7910 stack to be momentarily aligned only to that amount, so we pick
7911 the least alignment. */
7912
ee49a9c7
JW
7913 /* We can't check for arg_pointer_rtx here, because it is not
7914 guaranteed to have as much alignment as the stack pointer.
7915 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7916 alignment but the argument pointer has only 64 bit alignment. */
7917
0e9ff885
DM
7918 if ((x == frame_pointer_rtx
7919 || x == stack_pointer_rtx
7920 || x == hard_frame_pointer_rtx
7921 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7922 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
7923#ifdef STACK_BIAS
7924 && !STACK_BIAS
663522cb 7925#endif
0e9ff885 7926 )
230d793d 7927 {
b0d71df9 7928 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
230d793d
RS
7929
7930#ifdef PUSH_ROUNDING
f73ad30e 7931 if (REGNO (x) == STACK_POINTER_REGNUM && PUSH_ARGS)
b0d71df9 7932 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
230d793d
RS
7933#endif
7934
320dd7a7
RK
7935 /* We must return here, otherwise we may get a worse result from
7936 one of the choices below. There is nothing useful below as
7937 far as the stack pointer is concerned. */
663522cb 7938 return nonzero &= ~(sp_alignment - 1);
230d793d 7939 }
b0d71df9 7940#endif
230d793d 7941
55310dad
RK
7942 /* If X is a register whose nonzero bits value is current, use it.
7943 Otherwise, if X is a register whose value we can find, use that
7944 value. Otherwise, use the previously-computed global nonzero bits
7945 for this register. */
7946
7947 if (reg_last_set_value[REGNO (x)] != 0
7948 && reg_last_set_mode[REGNO (x)] == mode
57cf50a4
GRK
7949 && (reg_last_set_label[REGNO (x)] == label_tick
7950 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
7951 && REG_N_SETS (REGNO (x)) == 1
663522cb 7952 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
57cf50a4 7953 REGNO (x))))
55310dad
RK
7954 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7955 return reg_last_set_nonzero_bits[REGNO (x)];
230d793d
RS
7956
7957 tem = get_last_value (x);
9afa3d54 7958
230d793d 7959 if (tem)
9afa3d54
RK
7960 {
7961#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7962 /* If X is narrower than MODE and TEM is a non-negative
7963 constant that would appear negative in the mode of X,
7964 sign-extend it for use in reg_nonzero_bits because some
7965 machines (maybe most) will actually do the sign-extension
663522cb 7966 and this is the conservative approach.
9afa3d54
RK
7967
7968 ??? For 2.5, try to tighten up the MD files in this regard
7969 instead of this kludge. */
7970
7971 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7972 && GET_CODE (tem) == CONST_INT
7973 && INTVAL (tem) > 0
7974 && 0 != (INTVAL (tem)
7975 & ((HOST_WIDE_INT) 1
9e69be8c 7976 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
7977 tem = GEN_INT (INTVAL (tem)
7978 | ((HOST_WIDE_INT) (-1)
7979 << GET_MODE_BITSIZE (GET_MODE (x))));
7980#endif
7981 return nonzero_bits (tem, mode);
7982 }
951553af
RK
7983 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7984 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 7985 else
951553af 7986 return nonzero;
230d793d
RS
7987
7988 case CONST_INT:
9afa3d54
RK
7989#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7990 /* If X is negative in MODE, sign-extend the value. */
9e69be8c
RK
7991 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
7992 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
7993 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
9afa3d54
RK
7994#endif
7995
230d793d
RS
7996 return INTVAL (x);
7997
230d793d 7998 case MEM:
8baf60bb 7999#ifdef LOAD_EXTEND_OP
230d793d
RS
8000 /* In many, if not most, RISC machines, reading a byte from memory
8001 zeros the rest of the register. Noticing that fact saves a lot
8002 of extra zero-extends. */
8baf60bb
RK
8003 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8004 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d 8005#endif
8baf60bb 8006 break;
230d793d 8007
230d793d 8008 case EQ: case NE:
69bc0a1f
JH
8009 case UNEQ: case LTGT:
8010 case GT: case GTU: case UNGT:
8011 case LT: case LTU: case UNLT:
8012 case GE: case GEU: case UNGE:
8013 case LE: case LEU: case UNLE:
8014 case UNORDERED: case ORDERED:
3f508eca 8015
c6965c0f
RK
8016 /* If this produces an integer result, we know which bits are set.
8017 Code here used to clear bits outside the mode of X, but that is
8018 now done above. */
230d793d 8019
c6965c0f
RK
8020 if (GET_MODE_CLASS (mode) == MODE_INT
8021 && mode_width <= HOST_BITS_PER_WIDE_INT)
8022 nonzero = STORE_FLAG_VALUE;
230d793d 8023 break;
230d793d 8024
230d793d 8025 case NEG:
b3728b0e
JW
8026#if 0
8027 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8028 and num_sign_bit_copies. */
d0ab8cd3
RK
8029 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8030 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 8031 nonzero = 1;
b3728b0e 8032#endif
230d793d
RS
8033
8034 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
663522cb 8035 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
230d793d 8036 break;
d0ab8cd3
RK
8037
8038 case ABS:
b3728b0e
JW
8039#if 0
8040 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8041 and num_sign_bit_copies. */
d0ab8cd3
RK
8042 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8043 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 8044 nonzero = 1;
b3728b0e 8045#endif
d0ab8cd3 8046 break;
230d793d
RS
8047
8048 case TRUNCATE:
951553af 8049 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
230d793d
RS
8050 break;
8051
8052 case ZERO_EXTEND:
951553af 8053 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 8054 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 8055 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
230d793d
RS
8056 break;
8057
8058 case SIGN_EXTEND:
8059 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8060 Otherwise, show all the bits in the outer mode but not the inner
8061 may be non-zero. */
951553af 8062 inner_nz = nonzero_bits (XEXP (x, 0), mode);
230d793d
RS
8063 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8064 {
951553af 8065 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
e3da301d
MS
8066 if (inner_nz
8067 & (((HOST_WIDE_INT) 1
8068 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 8069 inner_nz |= (GET_MODE_MASK (mode)
663522cb 8070 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
230d793d
RS
8071 }
8072
951553af 8073 nonzero &= inner_nz;
230d793d
RS
8074 break;
8075
8076 case AND:
951553af
RK
8077 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8078 & nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
8079 break;
8080
d0ab8cd3
RK
8081 case XOR: case IOR:
8082 case UMIN: case UMAX: case SMIN: case SMAX:
951553af
RK
8083 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8084 | nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
8085 break;
8086
8087 case PLUS: case MINUS:
8088 case MULT:
8089 case DIV: case UDIV:
8090 case MOD: case UMOD:
8091 /* We can apply the rules of arithmetic to compute the number of
8092 high- and low-order zero bits of these operations. We start by
8093 computing the width (position of the highest-order non-zero bit)
8094 and the number of low-order zero bits for each value. */
8095 {
951553af
RK
8096 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
8097 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
8098 int width0 = floor_log2 (nz0) + 1;
8099 int width1 = floor_log2 (nz1) + 1;
8100 int low0 = floor_log2 (nz0 & -nz0);
8101 int low1 = floor_log2 (nz1 & -nz1);
318b149c
RK
8102 HOST_WIDE_INT op0_maybe_minusp
8103 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8104 HOST_WIDE_INT op1_maybe_minusp
8105 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
770ae6cc 8106 unsigned int result_width = mode_width;
230d793d
RS
8107 int result_low = 0;
8108
8109 switch (code)
8110 {
8111 case PLUS:
0e9ff885
DM
8112#ifdef STACK_BIAS
8113 if (STACK_BIAS
663522cb
KH
8114 && (XEXP (x, 0) == stack_pointer_rtx
8115 || XEXP (x, 0) == frame_pointer_rtx)
8116 && GET_CODE (XEXP (x, 1)) == CONST_INT)
0e9ff885
DM
8117 {
8118 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
8119
663522cb
KH
8120 nz0 = (GET_MODE_MASK (mode) & ~(sp_alignment - 1));
8121 nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
8122 width0 = floor_log2 (nz0) + 1;
8123 width1 = floor_log2 (nz1) + 1;
8124 low0 = floor_log2 (nz0 & -nz0);
8125 low1 = floor_log2 (nz1 & -nz1);
0e9ff885 8126 }
663522cb 8127#endif
230d793d
RS
8128 result_width = MAX (width0, width1) + 1;
8129 result_low = MIN (low0, low1);
8130 break;
8131 case MINUS:
8132 result_low = MIN (low0, low1);
8133 break;
8134 case MULT:
8135 result_width = width0 + width1;
8136 result_low = low0 + low1;
8137 break;
8138 case DIV:
2a8bb5cf
AH
8139 if (width1 == 0)
8140 break;
230d793d
RS
8141 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8142 result_width = width0;
8143 break;
8144 case UDIV:
2a8bb5cf
AH
8145 if (width1 == 0)
8146 break;
230d793d
RS
8147 result_width = width0;
8148 break;
8149 case MOD:
2a8bb5cf
AH
8150 if (width1 == 0)
8151 break;
230d793d
RS
8152 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8153 result_width = MIN (width0, width1);
8154 result_low = MIN (low0, low1);
8155 break;
8156 case UMOD:
2a8bb5cf
AH
8157 if (width1 == 0)
8158 break;
230d793d
RS
8159 result_width = MIN (width0, width1);
8160 result_low = MIN (low0, low1);
8161 break;
e9a25f70
JL
8162 default:
8163 abort ();
230d793d
RS
8164 }
8165
8166 if (result_width < mode_width)
951553af 8167 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
230d793d
RS
8168
8169 if (result_low > 0)
663522cb 8170 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
d1405722
RK
8171
8172#ifdef POINTERS_EXTEND_UNSIGNED
8173 /* If pointers extend unsigned and this is an addition or subtraction
8174 to a pointer in Pmode, all the bits above ptr_mode are known to be
8175 zero. */
8176 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8177 && (code == PLUS || code == MINUS)
8178 && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8179 nonzero &= GET_MODE_MASK (ptr_mode);
8180#endif
230d793d
RS
8181 }
8182 break;
8183
8184 case ZERO_EXTRACT:
8185 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 8186 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 8187 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
230d793d
RS
8188 break;
8189
8190 case SUBREG:
c3c2cb37
RK
8191 /* If this is a SUBREG formed for a promoted variable that has
8192 been zero-extended, we know that at least the high-order bits
8193 are zero, though others might be too. */
8194
8195 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
951553af
RK
8196 nonzero = (GET_MODE_MASK (GET_MODE (x))
8197 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 8198
230d793d
RS
8199 /* If the inner mode is a single word for both the host and target
8200 machines, we can compute this from which bits of the inner
951553af 8201 object might be nonzero. */
230d793d 8202 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5f4f0e22
CH
8203 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8204 <= HOST_BITS_PER_WIDE_INT))
230d793d 8205 {
951553af 8206 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8baf60bb 8207
b52ce03d
R
8208#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8209 /* If this is a typical RISC machine, we only have to worry
8210 about the way loads are extended. */
8211 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
729a2125
RK
8212 ? (((nonzero
8213 & (((unsigned HOST_WIDE_INT) 1
8214 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8215 != 0))
b52ce03d 8216 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
230d793d 8217#endif
b52ce03d
R
8218 {
8219 /* On many CISC machines, accessing an object in a wider mode
8220 causes the high-order bits to become undefined. So they are
8221 not known to be zero. */
8222 if (GET_MODE_SIZE (GET_MODE (x))
8223 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8224 nonzero |= (GET_MODE_MASK (GET_MODE (x))
663522cb 8225 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
b52ce03d 8226 }
230d793d
RS
8227 }
8228 break;
8229
8230 case ASHIFTRT:
8231 case LSHIFTRT:
8232 case ASHIFT:
230d793d 8233 case ROTATE:
951553af 8234 /* The nonzero bits are in two classes: any bits within MODE
230d793d 8235 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 8236 nonzero bits are those that are significant in the operand of
230d793d
RS
8237 the shift when shifted the appropriate number of bits. This
8238 shows that high-order bits are cleared by the right shift and
8239 low-order bits by left shifts. */
8240 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8241 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 8242 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
8243 {
8244 enum machine_mode inner_mode = GET_MODE (x);
770ae6cc 8245 unsigned int width = GET_MODE_BITSIZE (inner_mode);
230d793d 8246 int count = INTVAL (XEXP (x, 1));
5f4f0e22 8247 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
951553af
RK
8248 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
8249 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 8250 unsigned HOST_WIDE_INT outer = 0;
230d793d
RS
8251
8252 if (mode_width > width)
663522cb 8253 outer = (op_nonzero & nonzero & ~mode_mask);
230d793d
RS
8254
8255 if (code == LSHIFTRT)
8256 inner >>= count;
8257 else if (code == ASHIFTRT)
8258 {
8259 inner >>= count;
8260
951553af 8261 /* If the sign bit may have been nonzero before the shift, we
230d793d 8262 need to mark all the places it could have been copied to
951553af 8263 by the shift as possibly nonzero. */
5f4f0e22
CH
8264 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8265 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d 8266 }
45620ed4 8267 else if (code == ASHIFT)
230d793d
RS
8268 inner <<= count;
8269 else
8270 inner = ((inner << (count % width)
8271 | (inner >> (width - (count % width)))) & mode_mask);
8272
951553af 8273 nonzero &= (outer | inner);
230d793d
RS
8274 }
8275 break;
8276
8277 case FFS:
8278 /* This is at most the number of bits in the mode. */
951553af 8279 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 8280 break;
d0ab8cd3
RK
8281
8282 case IF_THEN_ELSE:
951553af
RK
8283 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
8284 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 8285 break;
663522cb 8286
e9a25f70
JL
8287 default:
8288 break;
230d793d
RS
8289 }
8290
951553af 8291 return nonzero;
230d793d 8292}
b3728b0e
JW
8293
8294/* See the macro definition above. */
8295#undef num_sign_bit_copies
230d793d 8296\f
d0ab8cd3 8297/* Return the number of bits at the high-order end of X that are known to
5109d49f
RK
8298 be equal to the sign bit. X will be used in mode MODE; if MODE is
8299 VOIDmode, X will be used in its own mode. The returned value will always
8300 be between 1 and the number of bits in MODE. */
d0ab8cd3 8301
770ae6cc 8302static unsigned int
d0ab8cd3
RK
8303num_sign_bit_copies (x, mode)
8304 rtx x;
8305 enum machine_mode mode;
8306{
8307 enum rtx_code code = GET_CODE (x);
770ae6cc 8308 unsigned int bitwidth;
d0ab8cd3 8309 int num0, num1, result;
951553af 8310 unsigned HOST_WIDE_INT nonzero;
d0ab8cd3
RK
8311 rtx tem;
8312
8313 /* If we weren't given a mode, use the mode of X. If the mode is still
1c75dfa4
RK
8314 VOIDmode, we don't know anything. Likewise if one of the modes is
8315 floating-point. */
d0ab8cd3
RK
8316
8317 if (mode == VOIDmode)
8318 mode = GET_MODE (x);
8319
1c75dfa4 8320 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
6752e8d2 8321 return 1;
d0ab8cd3
RK
8322
8323 bitwidth = GET_MODE_BITSIZE (mode);
8324
0f41302f 8325 /* For a smaller object, just ignore the high bits. */
312def2e 8326 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
770ae6cc
RK
8327 {
8328 num0 = num_sign_bit_copies (x, GET_MODE (x));
8329 return MAX (1,
8330 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8331 }
663522cb 8332
e9a25f70
JL
8333 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8334 {
0c314d1a
RK
8335#ifndef WORD_REGISTER_OPERATIONS
8336 /* If this machine does not do all register operations on the entire
8337 register and MODE is wider than the mode of X, we can say nothing
8338 at all about the high-order bits. */
e9a25f70
JL
8339 return 1;
8340#else
8341 /* Likewise on machines that do, if the mode of the object is smaller
8342 than a word and loads of that size don't sign extend, we can say
8343 nothing about the high order bits. */
8344 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8345#ifdef LOAD_EXTEND_OP
8346 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8347#endif
8348 )
8349 return 1;
0c314d1a 8350#endif
e9a25f70 8351 }
0c314d1a 8352
d0ab8cd3
RK
8353 switch (code)
8354 {
8355 case REG:
55310dad 8356
ff0dbdd1
RK
8357#ifdef POINTERS_EXTEND_UNSIGNED
8358 /* If pointers extend signed and this is a pointer in Pmode, say that
8359 all the bits above ptr_mode are known to be sign bit copies. */
8360 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
3502dc9c 8361 && REG_POINTER (x))
ff0dbdd1
RK
8362 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8363#endif
8364
55310dad
RK
8365 if (reg_last_set_value[REGNO (x)] != 0
8366 && reg_last_set_mode[REGNO (x)] == mode
57cf50a4
GRK
8367 && (reg_last_set_label[REGNO (x)] == label_tick
8368 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8369 && REG_N_SETS (REGNO (x)) == 1
8370 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
8371 REGNO (x))))
55310dad
RK
8372 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8373 return reg_last_set_sign_bit_copies[REGNO (x)];
d0ab8cd3 8374
663522cb 8375 tem = get_last_value (x);
d0ab8cd3
RK
8376 if (tem != 0)
8377 return num_sign_bit_copies (tem, mode);
55310dad
RK
8378
8379 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
8380 return reg_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
8381 break;
8382
457816e2 8383 case MEM:
8baf60bb 8384#ifdef LOAD_EXTEND_OP
457816e2 8385 /* Some RISC machines sign-extend all loads of smaller than a word. */
8baf60bb 8386 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
770ae6cc
RK
8387 return MAX (1, ((int) bitwidth
8388 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
457816e2 8389#endif
8baf60bb 8390 break;
457816e2 8391
d0ab8cd3
RK
8392 case CONST_INT:
8393 /* If the constant is negative, take its 1's complement and remask.
8394 Then see how many zero bits we have. */
951553af 8395 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 8396 if (bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 8397 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
663522cb 8398 nonzero = (~nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 8399
951553af 8400 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8401
8402 case SUBREG:
c3c2cb37
RK
8403 /* If this is a SUBREG for a promoted object that is sign-extended
8404 and we are looking at it in a wider mode, we know that at least the
8405 high-order bits are known to be sign bit copies. */
8406
8407 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
770ae6cc
RK
8408 {
8409 num0 = num_sign_bit_copies (SUBREG_REG (x), mode);
8410 return MAX ((int) bitwidth
8411 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8412 num0);
8413 }
663522cb 8414
0f41302f 8415 /* For a smaller object, just ignore the high bits. */
d0ab8cd3
RK
8416 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8417 {
8418 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
8419 return MAX (1, (num0
770ae6cc
RK
8420 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8421 - bitwidth)));
d0ab8cd3 8422 }
457816e2 8423
8baf60bb 8424#ifdef WORD_REGISTER_OPERATIONS
2aec5b7a 8425#ifdef LOAD_EXTEND_OP
8baf60bb
RK
8426 /* For paradoxical SUBREGs on machines where all register operations
8427 affect the entire register, just look inside. Note that we are
8428 passing MODE to the recursive call, so the number of sign bit copies
8429 will remain relative to that mode, not the inner mode. */
457816e2 8430
2aec5b7a
JW
8431 /* This works only if loads sign extend. Otherwise, if we get a
8432 reload for the inner part, it may be loaded from the stack, and
8433 then we lose all sign bit copies that existed before the store
8434 to the stack. */
8435
8436 if ((GET_MODE_SIZE (GET_MODE (x))
8437 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8438 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
457816e2 8439 return num_sign_bit_copies (SUBREG_REG (x), mode);
2aec5b7a 8440#endif
457816e2 8441#endif
d0ab8cd3
RK
8442 break;
8443
8444 case SIGN_EXTRACT:
8445 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
770ae6cc 8446 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
d0ab8cd3
RK
8447 break;
8448
663522cb 8449 case SIGN_EXTEND:
d0ab8cd3
RK
8450 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8451 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
8452
8453 case TRUNCATE:
0f41302f 8454 /* For a smaller object, just ignore the high bits. */
d0ab8cd3 8455 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
770ae6cc
RK
8456 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8457 - bitwidth)));
d0ab8cd3
RK
8458
8459 case NOT:
8460 return num_sign_bit_copies (XEXP (x, 0), mode);
8461
8462 case ROTATE: case ROTATERT:
8463 /* If we are rotating left by a number of bits less than the number
8464 of sign bit copies, we can just subtract that amount from the
8465 number. */
8466 if (GET_CODE (XEXP (x, 1)) == CONST_INT
ae0ed63a
JM
8467 && INTVAL (XEXP (x, 1)) >= 0
8468 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
d0ab8cd3
RK
8469 {
8470 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8471 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
770ae6cc 8472 : (int) bitwidth - INTVAL (XEXP (x, 1))));
d0ab8cd3
RK
8473 }
8474 break;
8475
8476 case NEG:
8477 /* In general, this subtracts one sign bit copy. But if the value
8478 is known to be positive, the number of sign bit copies is the
951553af
RK
8479 same as that of the input. Finally, if the input has just one bit
8480 that might be nonzero, all the bits are copies of the sign bit. */
70186b34
BS
8481 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8482 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8483 return num0 > 1 ? num0 - 1 : 1;
8484
951553af
RK
8485 nonzero = nonzero_bits (XEXP (x, 0), mode);
8486 if (nonzero == 1)
d0ab8cd3
RK
8487 return bitwidth;
8488
d0ab8cd3 8489 if (num0 > 1
951553af 8490 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d0ab8cd3
RK
8491 num0--;
8492
8493 return num0;
8494
8495 case IOR: case AND: case XOR:
8496 case SMIN: case SMAX: case UMIN: case UMAX:
8497 /* Logical operations will preserve the number of sign-bit copies.
8498 MIN and MAX operations always return one of the operands. */
8499 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8500 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8501 return MIN (num0, num1);
8502
8503 case PLUS: case MINUS:
8504 /* For addition and subtraction, we can have a 1-bit carry. However,
8505 if we are subtracting 1 from a positive number, there will not
8506 be such a carry. Furthermore, if the positive number is known to
8507 be 0 or 1, we know the result is either -1 or 0. */
8508
3e3ea975 8509 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 8510 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 8511 {
951553af
RK
8512 nonzero = nonzero_bits (XEXP (x, 0), mode);
8513 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8514 return (nonzero == 1 || nonzero == 0 ? bitwidth
8515 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8516 }
8517
8518 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8519 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
d1405722
RK
8520 result = MAX (1, MIN (num0, num1) - 1);
8521
8522#ifdef POINTERS_EXTEND_UNSIGNED
8523 /* If pointers extend signed and this is an addition or subtraction
8524 to a pointer in Pmode, all the bits above ptr_mode are known to be
8525 sign bit copies. */
8526 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8527 && (code == PLUS || code == MINUS)
8528 && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8529 result = MAX ((GET_MODE_BITSIZE (Pmode)
8530 - GET_MODE_BITSIZE (ptr_mode) + 1),
8531 result);
8532#endif
8533 return result;
663522cb 8534
d0ab8cd3
RK
8535 case MULT:
8536 /* The number of bits of the product is the sum of the number of
8537 bits of both terms. However, unless one of the terms if known
8538 to be positive, we must allow for an additional bit since negating
8539 a negative number can remove one sign bit copy. */
8540
8541 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8542 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8543
8544 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8545 if (result > 0
70186b34
BS
8546 && (bitwidth > HOST_BITS_PER_WIDE_INT
8547 || (((nonzero_bits (XEXP (x, 0), mode)
8548 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8549 && ((nonzero_bits (XEXP (x, 1), mode)
8550 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
d0ab8cd3
RK
8551 result--;
8552
8553 return MAX (1, result);
8554
8555 case UDIV:
70186b34
BS
8556 /* The result must be <= the first operand. If the first operand
8557 has the high bit set, we know nothing about the number of sign
8558 bit copies. */
8559 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8560 return 1;
8561 else if ((nonzero_bits (XEXP (x, 0), mode)
8562 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8563 return 1;
8564 else
8565 return num_sign_bit_copies (XEXP (x, 0), mode);
663522cb 8566
d0ab8cd3
RK
8567 case UMOD:
8568 /* The result must be <= the scond operand. */
8569 return num_sign_bit_copies (XEXP (x, 1), mode);
8570
8571 case DIV:
8572 /* Similar to unsigned division, except that we have to worry about
8573 the case where the divisor is negative, in which case we have
8574 to add 1. */
8575 result = num_sign_bit_copies (XEXP (x, 0), mode);
8576 if (result > 1
70186b34
BS
8577 && (bitwidth > HOST_BITS_PER_WIDE_INT
8578 || (nonzero_bits (XEXP (x, 1), mode)
8579 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8580 result--;
d0ab8cd3
RK
8581
8582 return result;
8583
8584 case MOD:
8585 result = num_sign_bit_copies (XEXP (x, 1), mode);
8586 if (result > 1
70186b34
BS
8587 && (bitwidth > HOST_BITS_PER_WIDE_INT
8588 || (nonzero_bits (XEXP (x, 1), mode)
8589 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8590 result--;
d0ab8cd3
RK
8591
8592 return result;
8593
8594 case ASHIFTRT:
8595 /* Shifts by a constant add to the number of bits equal to the
8596 sign bit. */
8597 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8598 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8599 && INTVAL (XEXP (x, 1)) > 0)
ae0ed63a 8600 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
d0ab8cd3
RK
8601
8602 return num0;
8603
8604 case ASHIFT:
d0ab8cd3
RK
8605 /* Left shifts destroy copies. */
8606 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8607 || INTVAL (XEXP (x, 1)) < 0
ae0ed63a 8608 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
d0ab8cd3
RK
8609 return 1;
8610
8611 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8612 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8613
8614 case IF_THEN_ELSE:
8615 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8616 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8617 return MIN (num0, num1);
8618
d0ab8cd3 8619 case EQ: case NE: case GE: case GT: case LE: case LT:
69bc0a1f 8620 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
d0ab8cd3 8621 case GEU: case GTU: case LEU: case LTU:
69bc0a1f
JH
8622 case UNORDERED: case ORDERED:
8623 /* If the constant is negative, take its 1's complement and remask.
8624 Then see how many zero bits we have. */
8625 nonzero = STORE_FLAG_VALUE;
8626 if (bitwidth <= HOST_BITS_PER_WIDE_INT
8627 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8628 nonzero = (~nonzero) & GET_MODE_MASK (mode);
8629
8630 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
e9a25f70 8631 break;
663522cb 8632
e9a25f70
JL
8633 default:
8634 break;
d0ab8cd3
RK
8635 }
8636
8637 /* If we haven't been able to figure it out by one of the above rules,
8638 see if some of the high-order bits are known to be zero. If so,
ac49a949
RS
8639 count those bits and return one less than that amount. If we can't
8640 safely compute the mask for this mode, always return BITWIDTH. */
8641
8642 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 8643 return 1;
d0ab8cd3 8644
951553af 8645 nonzero = nonzero_bits (x, mode);
df6f4086 8646 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
951553af 8647 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8648}
8649\f
1a26b032
RK
8650/* Return the number of "extended" bits there are in X, when interpreted
8651 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8652 unsigned quantities, this is the number of high-order zero bits.
8653 For signed quantities, this is the number of copies of the sign bit
8654 minus 1. In both case, this function returns the number of "spare"
8655 bits. For example, if two quantities for which this function returns
8656 at least 1 are added, the addition is known not to overflow.
8657
8658 This function will always return 0 unless called during combine, which
8659 implies that it must be called from a define_split. */
8660
770ae6cc 8661unsigned int
1a26b032
RK
8662extended_count (x, mode, unsignedp)
8663 rtx x;
8664 enum machine_mode mode;
8665 int unsignedp;
8666{
951553af 8667 if (nonzero_sign_valid == 0)
1a26b032
RK
8668 return 0;
8669
8670 return (unsignedp
ac49a949 8671 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
770ae6cc
RK
8672 ? (GET_MODE_BITSIZE (mode) - 1
8673 - floor_log2 (nonzero_bits (x, mode)))
8674 : 0)
1a26b032
RK
8675 : num_sign_bit_copies (x, mode) - 1);
8676}
8677\f
230d793d
RS
8678/* This function is called from `simplify_shift_const' to merge two
8679 outer operations. Specifically, we have already found that we need
8680 to perform operation *POP0 with constant *PCONST0 at the outermost
8681 position. We would now like to also perform OP1 with constant CONST1
8682 (with *POP0 being done last).
8683
8684 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
663522cb 8685 the resulting operation. *PCOMP_P is set to 1 if we would need to
230d793d
RS
8686 complement the innermost operand, otherwise it is unchanged.
8687
8688 MODE is the mode in which the operation will be done. No bits outside
8689 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 8690 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
8691
8692 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
8693 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8694 result is simply *PCONST0.
8695
8696 If the resulting operation cannot be expressed as one operation, we
8697 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8698
8699static int
8700merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8701 enum rtx_code *pop0;
5f4f0e22 8702 HOST_WIDE_INT *pconst0;
230d793d 8703 enum rtx_code op1;
5f4f0e22 8704 HOST_WIDE_INT const1;
230d793d
RS
8705 enum machine_mode mode;
8706 int *pcomp_p;
8707{
8708 enum rtx_code op0 = *pop0;
5f4f0e22 8709 HOST_WIDE_INT const0 = *pconst0;
230d793d
RS
8710
8711 const0 &= GET_MODE_MASK (mode);
8712 const1 &= GET_MODE_MASK (mode);
8713
8714 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8715 if (op0 == AND)
8716 const1 &= const0;
8717
8718 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8719 if OP0 is SET. */
8720
8721 if (op1 == NIL || op0 == SET)
8722 return 1;
8723
8724 else if (op0 == NIL)
8725 op0 = op1, const0 = const1;
8726
8727 else if (op0 == op1)
8728 {
8729 switch (op0)
8730 {
8731 case AND:
8732 const0 &= const1;
8733 break;
8734 case IOR:
8735 const0 |= const1;
8736 break;
8737 case XOR:
8738 const0 ^= const1;
8739 break;
8740 case PLUS:
8741 const0 += const1;
8742 break;
8743 case NEG:
8744 op0 = NIL;
8745 break;
e9a25f70
JL
8746 default:
8747 break;
230d793d
RS
8748 }
8749 }
8750
8751 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8752 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8753 return 0;
8754
8755 /* If the two constants aren't the same, we can't do anything. The
8756 remaining six cases can all be done. */
8757 else if (const0 != const1)
8758 return 0;
8759
8760 else
8761 switch (op0)
8762 {
8763 case IOR:
8764 if (op1 == AND)
8765 /* (a & b) | b == b */
8766 op0 = SET;
8767 else /* op1 == XOR */
8768 /* (a ^ b) | b == a | b */
b729186a 8769 {;}
230d793d
RS
8770 break;
8771
8772 case XOR:
8773 if (op1 == AND)
8774 /* (a & b) ^ b == (~a) & b */
8775 op0 = AND, *pcomp_p = 1;
8776 else /* op1 == IOR */
8777 /* (a | b) ^ b == a & ~b */
663522cb 8778 op0 = AND, *pconst0 = ~const0;
230d793d
RS
8779 break;
8780
8781 case AND:
8782 if (op1 == IOR)
8783 /* (a | b) & b == b */
8784 op0 = SET;
8785 else /* op1 == XOR */
8786 /* (a ^ b) & b) == (~a) & b */
8787 *pcomp_p = 1;
8788 break;
e9a25f70
JL
8789 default:
8790 break;
230d793d
RS
8791 }
8792
8793 /* Check for NO-OP cases. */
8794 const0 &= GET_MODE_MASK (mode);
8795 if (const0 == 0
8796 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8797 op0 = NIL;
8798 else if (const0 == 0 && op0 == AND)
8799 op0 = SET;
e51712db
KG
8800 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8801 && op0 == AND)
230d793d
RS
8802 op0 = NIL;
8803
7e4ce834
RH
8804 /* ??? Slightly redundant with the above mask, but not entirely.
8805 Moving this above means we'd have to sign-extend the mode mask
8806 for the final test. */
8807 const0 = trunc_int_for_mode (const0, mode);
9fa6d012 8808
230d793d
RS
8809 *pop0 = op0;
8810 *pconst0 = const0;
8811
8812 return 1;
8813}
8814\f
8815/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8816 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8817 that we started with.
8818
8819 The shift is normally computed in the widest mode we find in VAROP, as
8820 long as it isn't a different number of words than RESULT_MODE. Exceptions
8821 are ASHIFTRT and ROTATE, which are always done in their original mode, */
8822
8823static rtx
770ae6cc 8824simplify_shift_const (x, code, result_mode, varop, input_count)
230d793d
RS
8825 rtx x;
8826 enum rtx_code code;
8827 enum machine_mode result_mode;
8828 rtx varop;
770ae6cc 8829 int input_count;
230d793d
RS
8830{
8831 enum rtx_code orig_code = code;
770ae6cc
RK
8832 int orig_count = input_count;
8833 unsigned int count;
8834 int signed_count;
230d793d
RS
8835 enum machine_mode mode = result_mode;
8836 enum machine_mode shift_mode, tmode;
770ae6cc 8837 unsigned int mode_words
230d793d
RS
8838 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8839 /* We form (outer_op (code varop count) (outer_const)). */
8840 enum rtx_code outer_op = NIL;
c4e861e8 8841 HOST_WIDE_INT outer_const = 0;
230d793d
RS
8842 rtx const_rtx;
8843 int complement_p = 0;
8844 rtx new;
8845
8846 /* If we were given an invalid count, don't do anything except exactly
8847 what was requested. */
8848
770ae6cc 8849 if (input_count < 0 || input_count > (int) GET_MODE_BITSIZE (mode))
230d793d
RS
8850 {
8851 if (x)
8852 return x;
8853
770ae6cc 8854 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (input_count));
230d793d
RS
8855 }
8856
770ae6cc
RK
8857 count = input_count;
8858
853d8828
RH
8859 /* Make sure and truncate the "natural" shift on the way in. We don't
8860 want to do this inside the loop as it makes it more difficult to
8861 combine shifts. */
8862#ifdef SHIFT_COUNT_TRUNCATED
8863 if (SHIFT_COUNT_TRUNCATED)
8864 count %= GET_MODE_BITSIZE (mode);
8865#endif
8866
230d793d
RS
8867 /* Unless one of the branches of the `if' in this loop does a `continue',
8868 we will `break' the loop after the `if'. */
8869
8870 while (count != 0)
8871 {
8872 /* If we have an operand of (clobber (const_int 0)), just return that
8873 value. */
8874 if (GET_CODE (varop) == CLOBBER)
8875 return varop;
8876
8877 /* If we discovered we had to complement VAROP, leave. Making a NOT
8878 here would cause an infinite loop. */
8879 if (complement_p)
8880 break;
8881
abc95ed3 8882 /* Convert ROTATERT to ROTATE. */
230d793d
RS
8883 if (code == ROTATERT)
8884 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8885
230d793d 8886 /* We need to determine what mode we will do the shift in. If the
f6789c77
RK
8887 shift is a right shift or a ROTATE, we must always do it in the mode
8888 it was originally done in. Otherwise, we can do it in MODE, the
0f41302f 8889 widest mode encountered. */
f6789c77
RK
8890 shift_mode
8891 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8892 ? result_mode : mode);
230d793d
RS
8893
8894 /* Handle cases where the count is greater than the size of the mode
853d8828
RH
8895 minus 1. For ASHIFT, use the size minus one as the count (this can
8896 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8897 take the count modulo the size. For other shifts, the result is
8898 zero.
230d793d
RS
8899
8900 Since these shifts are being produced by the compiler by combining
8901 multiple operations, each of which are defined, we know what the
8902 result is supposed to be. */
663522cb 8903
230d793d
RS
8904 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8905 {
8906 if (code == ASHIFTRT)
8907 count = GET_MODE_BITSIZE (shift_mode) - 1;
8908 else if (code == ROTATE || code == ROTATERT)
8909 count %= GET_MODE_BITSIZE (shift_mode);
8910 else
8911 {
8912 /* We can't simply return zero because there may be an
8913 outer op. */
8914 varop = const0_rtx;
8915 count = 0;
8916 break;
8917 }
8918 }
8919
312def2e
RK
8920 /* An arithmetic right shift of a quantity known to be -1 or 0
8921 is a no-op. */
8922 if (code == ASHIFTRT
8923 && (num_sign_bit_copies (varop, shift_mode)
8924 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 8925 {
312def2e
RK
8926 count = 0;
8927 break;
8928 }
d0ab8cd3 8929
312def2e
RK
8930 /* If we are doing an arithmetic right shift and discarding all but
8931 the sign bit copies, this is equivalent to doing a shift by the
8932 bitsize minus one. Convert it into that shift because it will often
8933 allow other simplifications. */
500c518b 8934
312def2e
RK
8935 if (code == ASHIFTRT
8936 && (count + num_sign_bit_copies (varop, shift_mode)
8937 >= GET_MODE_BITSIZE (shift_mode)))
8938 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 8939
230d793d
RS
8940 /* We simplify the tests below and elsewhere by converting
8941 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8942 `make_compound_operation' will convert it to a ASHIFTRT for
8943 those machines (such as Vax) that don't have a LSHIFTRT. */
5f4f0e22 8944 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8945 && code == ASHIFTRT
951553af 8946 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
8947 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8948 == 0))
230d793d
RS
8949 code = LSHIFTRT;
8950
8951 switch (GET_CODE (varop))
8952 {
8953 case SIGN_EXTEND:
8954 case ZERO_EXTEND:
8955 case SIGN_EXTRACT:
8956 case ZERO_EXTRACT:
8957 new = expand_compound_operation (varop);
8958 if (new != varop)
8959 {
8960 varop = new;
8961 continue;
8962 }
8963 break;
8964
8965 case MEM:
8966 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8967 minus the width of a smaller mode, we can do this with a
8968 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8969 if ((code == ASHIFTRT || code == LSHIFTRT)
8970 && ! mode_dependent_address_p (XEXP (varop, 0))
8971 && ! MEM_VOLATILE_P (varop)
8972 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8973 MODE_INT, 1)) != BLKmode)
8974 {
f1ec5147
RK
8975 new = adjust_address_nv (varop, tmode,
8976 BYTES_BIG_ENDIAN ? 0
8977 : count / BITS_PER_UNIT);
bf49b139 8978
f1c6ba8b
RK
8979 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
8980 : ZERO_EXTEND, mode, new);
230d793d
RS
8981 count = 0;
8982 continue;
8983 }
8984 break;
8985
8986 case USE:
8987 /* Similar to the case above, except that we can only do this if
8988 the resulting mode is the same as that of the underlying
8989 MEM and adjust the address depending on the *bits* endianness
8990 because of the way that bit-field extract insns are defined. */
8991 if ((code == ASHIFTRT || code == LSHIFTRT)
8992 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8993 MODE_INT, 1)) != BLKmode
8994 && tmode == GET_MODE (XEXP (varop, 0)))
8995 {
f76b9db2
ILT
8996 if (BITS_BIG_ENDIAN)
8997 new = XEXP (varop, 0);
8998 else
8999 {
9000 new = copy_rtx (XEXP (varop, 0));
663522cb 9001 SUBST (XEXP (new, 0),
f76b9db2
ILT
9002 plus_constant (XEXP (new, 0),
9003 count / BITS_PER_UNIT));
9004 }
230d793d 9005
f1c6ba8b
RK
9006 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9007 : ZERO_EXTEND, mode, new);
230d793d
RS
9008 count = 0;
9009 continue;
9010 }
9011 break;
9012
9013 case SUBREG:
9014 /* If VAROP is a SUBREG, strip it as long as the inner operand has
9015 the same number of words as what we've seen so far. Then store
9016 the widest mode in MODE. */
f9e67232
RS
9017 if (subreg_lowpart_p (varop)
9018 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9019 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
9020 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9021 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9022 == mode_words))
9023 {
9024 varop = SUBREG_REG (varop);
9025 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9026 mode = GET_MODE (varop);
9027 continue;
9028 }
9029 break;
9030
9031 case MULT:
9032 /* Some machines use MULT instead of ASHIFT because MULT
9033 is cheaper. But it is still better on those machines to
9034 merge two shifts into one. */
9035 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9036 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9037 {
770ae6cc
RK
9038 varop
9039 = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9040 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
9041 continue;
9042 }
9043 break;
9044
9045 case UDIV:
9046 /* Similar, for when divides are cheaper. */
9047 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9048 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9049 {
770ae6cc
RK
9050 varop
9051 = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9052 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
9053 continue;
9054 }
9055 break;
9056
9057 case ASHIFTRT:
8f8d8d6e
AO
9058 /* If we are extracting just the sign bit of an arithmetic
9059 right shift, that shift is not needed. However, the sign
9060 bit of a wider mode may be different from what would be
9061 interpreted as the sign bit in a narrower mode, so, if
9062 the result is narrower, don't discard the shift. */
9063 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9064 && (GET_MODE_BITSIZE (result_mode)
9065 >= GET_MODE_BITSIZE (GET_MODE (varop))))
230d793d
RS
9066 {
9067 varop = XEXP (varop, 0);
9068 continue;
9069 }
9070
0f41302f 9071 /* ... fall through ... */
230d793d
RS
9072
9073 case LSHIFTRT:
9074 case ASHIFT:
230d793d
RS
9075 case ROTATE:
9076 /* Here we have two nested shifts. The result is usually the
9077 AND of a new shift with a mask. We compute the result below. */
9078 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9079 && INTVAL (XEXP (varop, 1)) >= 0
9080 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
9081 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9082 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
9083 {
9084 enum rtx_code first_code = GET_CODE (varop);
770ae6cc 9085 unsigned int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 9086 unsigned HOST_WIDE_INT mask;
230d793d 9087 rtx mask_rtx;
230d793d 9088
230d793d
RS
9089 /* We have one common special case. We can't do any merging if
9090 the inner code is an ASHIFTRT of a smaller mode. However, if
9091 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9092 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9093 we can convert it to
9094 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
9095 This simplifies certain SIGN_EXTEND operations. */
9096 if (code == ASHIFT && first_code == ASHIFTRT
9097 && (GET_MODE_BITSIZE (result_mode)
9098 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
9099 {
9100 /* C3 has the low-order C1 bits zero. */
663522cb 9101
5f4f0e22 9102 mask = (GET_MODE_MASK (mode)
663522cb 9103 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 9104
5f4f0e22 9105 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 9106 XEXP (varop, 0), mask);
5f4f0e22 9107 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
9108 varop, count);
9109 count = first_count;
9110 code = ASHIFTRT;
9111 continue;
9112 }
663522cb 9113
d0ab8cd3
RK
9114 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9115 than C1 high-order bits equal to the sign bit, we can convert
9116 this to either an ASHIFT or a ASHIFTRT depending on the
663522cb 9117 two counts.
230d793d
RS
9118
9119 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9120
9121 if (code == ASHIFTRT && first_code == ASHIFT
9122 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
9123 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9124 > first_count))
230d793d 9125 {
d0ab8cd3 9126 varop = XEXP (varop, 0);
770ae6cc
RK
9127
9128 signed_count = count - first_count;
9129 if (signed_count < 0)
663522cb 9130 count = -signed_count, code = ASHIFT;
770ae6cc
RK
9131 else
9132 count = signed_count;
9133
d0ab8cd3 9134 continue;
230d793d
RS
9135 }
9136
9137 /* There are some cases we can't do. If CODE is ASHIFTRT,
9138 we can only do this if FIRST_CODE is also ASHIFTRT.
9139
9140 We can't do the case when CODE is ROTATE and FIRST_CODE is
9141 ASHIFTRT.
9142
9143 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 9144 we can't do this if either shift is a right shift or ROTATE.
230d793d
RS
9145
9146 Finally, we can't do any of these if the mode is too wide
9147 unless the codes are the same.
9148
9149 Handle the case where the shift codes are the same
9150 first. */
9151
9152 if (code == first_code)
9153 {
9154 if (GET_MODE (varop) != result_mode
bdaae9a0
RK
9155 && (code == ASHIFTRT || code == LSHIFTRT
9156 || code == ROTATE))
230d793d
RS
9157 break;
9158
9159 count += first_count;
9160 varop = XEXP (varop, 0);
9161 continue;
9162 }
9163
9164 if (code == ASHIFTRT
9165 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 9166 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 9167 || (GET_MODE (varop) != result_mode
bdaae9a0
RK
9168 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9169 || first_code == ROTATE
230d793d
RS
9170 || code == ROTATE)))
9171 break;
9172
9173 /* To compute the mask to apply after the shift, shift the
663522cb 9174 nonzero bits of the inner shift the same way the
230d793d
RS
9175 outer shift will. */
9176
951553af 9177 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
9178
9179 mask_rtx
9180 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 9181 GEN_INT (count));
663522cb 9182
230d793d
RS
9183 /* Give up if we can't compute an outer operation to use. */
9184 if (mask_rtx == 0
9185 || GET_CODE (mask_rtx) != CONST_INT
9186 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9187 INTVAL (mask_rtx),
9188 result_mode, &complement_p))
9189 break;
9190
9191 /* If the shifts are in the same direction, we add the
9192 counts. Otherwise, we subtract them. */
770ae6cc 9193 signed_count = count;
230d793d
RS
9194 if ((code == ASHIFTRT || code == LSHIFTRT)
9195 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
770ae6cc 9196 signed_count += first_count;
230d793d 9197 else
770ae6cc 9198 signed_count -= first_count;
230d793d 9199
663522cb 9200 /* If COUNT is positive, the new shift is usually CODE,
230d793d
RS
9201 except for the two exceptions below, in which case it is
9202 FIRST_CODE. If the count is negative, FIRST_CODE should
9203 always be used */
770ae6cc 9204 if (signed_count > 0
230d793d
RS
9205 && ((first_code == ROTATE && code == ASHIFT)
9206 || (first_code == ASHIFTRT && code == LSHIFTRT)))
770ae6cc
RK
9207 code = first_code, count = signed_count;
9208 else if (signed_count < 0)
663522cb 9209 code = first_code, count = -signed_count;
770ae6cc
RK
9210 else
9211 count = signed_count;
230d793d
RS
9212
9213 varop = XEXP (varop, 0);
9214 continue;
9215 }
9216
9217 /* If we have (A << B << C) for any shift, we can convert this to
9218 (A << C << B). This wins if A is a constant. Only try this if
9219 B is not a constant. */
9220
9221 else if (GET_CODE (varop) == code
9222 && GET_CODE (XEXP (varop, 1)) != CONST_INT
9223 && 0 != (new
9224 = simplify_binary_operation (code, mode,
9225 XEXP (varop, 0),
5f4f0e22 9226 GEN_INT (count))))
230d793d 9227 {
f1c6ba8b 9228 varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
230d793d
RS
9229 count = 0;
9230 continue;
9231 }
9232 break;
9233
9234 case NOT:
9235 /* Make this fit the case below. */
f1c6ba8b
RK
9236 varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9237 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
9238 continue;
9239
9240 case IOR:
9241 case AND:
9242 case XOR:
9243 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9244 with C the size of VAROP - 1 and the shift is logical if
9245 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9246 we have an (le X 0) operation. If we have an arithmetic shift
9247 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9248 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9249
9250 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9251 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9252 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9253 && (code == LSHIFTRT || code == ASHIFTRT)
9254 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9255 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9256 {
9257 count = 0;
f1c6ba8b
RK
9258 varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9259 const0_rtx);
230d793d
RS
9260
9261 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
f1c6ba8b 9262 varop = gen_rtx_NEG (GET_MODE (varop), varop);
230d793d
RS
9263
9264 continue;
9265 }
9266
9267 /* If we have (shift (logical)), move the logical to the outside
9268 to allow it to possibly combine with another logical and the
9269 shift to combine with another shift. This also canonicalizes to
9270 what a ZERO_EXTRACT looks like. Also, some machines have
9271 (and (shift)) insns. */
9272
9273 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9274 && (new = simplify_binary_operation (code, result_mode,
9275 XEXP (varop, 1),
5f4f0e22 9276 GEN_INT (count))) != 0
663522cb 9277 && GET_CODE (new) == CONST_INT
230d793d
RS
9278 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9279 INTVAL (new), result_mode, &complement_p))
9280 {
9281 varop = XEXP (varop, 0);
9282 continue;
9283 }
9284
9285 /* If we can't do that, try to simplify the shift in each arm of the
9286 logical expression, make a new logical expression, and apply
9287 the inverse distributive law. */
9288 {
00d4ca1c 9289 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 9290 XEXP (varop, 0), count);
00d4ca1c 9291 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
9292 XEXP (varop, 1), count);
9293
21a64bf1 9294 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
230d793d
RS
9295 varop = apply_distributive_law (varop);
9296
9297 count = 0;
9298 }
9299 break;
9300
9301 case EQ:
45620ed4 9302 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 9303 says that the sign bit can be tested, FOO has mode MODE, C is
45620ed4
RK
9304 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9305 that may be nonzero. */
9306 if (code == LSHIFTRT
230d793d
RS
9307 && XEXP (varop, 1) == const0_rtx
9308 && GET_MODE (XEXP (varop, 0)) == result_mode
9309 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 9310 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 9311 && ((STORE_FLAG_VALUE
663522cb 9312 & ((HOST_WIDE_INT) 1
770ae6cc 9313 < (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 9314 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
9315 && merge_outer_ops (&outer_op, &outer_const, XOR,
9316 (HOST_WIDE_INT) 1, result_mode,
9317 &complement_p))
230d793d
RS
9318 {
9319 varop = XEXP (varop, 0);
9320 count = 0;
9321 continue;
9322 }
9323 break;
9324
9325 case NEG:
d0ab8cd3
RK
9326 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9327 than the number of bits in the mode is equivalent to A. */
9328 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 9329 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 9330 {
d0ab8cd3 9331 varop = XEXP (varop, 0);
230d793d
RS
9332 count = 0;
9333 continue;
9334 }
9335
9336 /* NEG commutes with ASHIFT since it is multiplication. Move the
9337 NEG outside to allow shifts to combine. */
9338 if (code == ASHIFT
5f4f0e22
CH
9339 && merge_outer_ops (&outer_op, &outer_const, NEG,
9340 (HOST_WIDE_INT) 0, result_mode,
9341 &complement_p))
230d793d
RS
9342 {
9343 varop = XEXP (varop, 0);
9344 continue;
9345 }
9346 break;
9347
9348 case PLUS:
d0ab8cd3
RK
9349 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9350 is one less than the number of bits in the mode is
9351 equivalent to (xor A 1). */
230d793d
RS
9352 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9353 && XEXP (varop, 1) == constm1_rtx
951553af 9354 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
9355 && merge_outer_ops (&outer_op, &outer_const, XOR,
9356 (HOST_WIDE_INT) 1, result_mode,
9357 &complement_p))
230d793d
RS
9358 {
9359 count = 0;
9360 varop = XEXP (varop, 0);
9361 continue;
9362 }
9363
3f508eca 9364 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 9365 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
9366 bits are known zero in FOO, we can replace the PLUS with FOO.
9367 Similarly in the other operand order. This code occurs when
9368 we are computing the size of a variable-size array. */
9369
9370 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 9371 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
9372 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9373 && (nonzero_bits (XEXP (varop, 1), result_mode)
9374 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
9375 {
9376 varop = XEXP (varop, 0);
9377 continue;
9378 }
9379 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 9380 && count < HOST_BITS_PER_WIDE_INT
ac49a949 9381 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 9382 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 9383 >> count)
951553af
RK
9384 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9385 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
9386 result_mode)))
9387 {
9388 varop = XEXP (varop, 1);
9389 continue;
9390 }
9391
230d793d
RS
9392 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9393 if (code == ASHIFT
9394 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9395 && (new = simplify_binary_operation (ASHIFT, result_mode,
9396 XEXP (varop, 1),
5f4f0e22 9397 GEN_INT (count))) != 0
770ae6cc 9398 && GET_CODE (new) == CONST_INT
230d793d
RS
9399 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9400 INTVAL (new), result_mode, &complement_p))
9401 {
9402 varop = XEXP (varop, 0);
9403 continue;
9404 }
9405 break;
9406
9407 case MINUS:
9408 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
9409 with C the size of VAROP - 1 and the shift is logical if
9410 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9411 we have a (gt X 0) operation. If the shift is arithmetic with
9412 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9413 we have a (neg (gt X 0)) operation. */
9414
0802d516
RK
9415 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9416 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
230d793d 9417 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
230d793d
RS
9418 && (code == LSHIFTRT || code == ASHIFTRT)
9419 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9420 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9421 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9422 {
9423 count = 0;
f1c6ba8b
RK
9424 varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9425 const0_rtx);
230d793d
RS
9426
9427 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
f1c6ba8b 9428 varop = gen_rtx_NEG (GET_MODE (varop), varop);
230d793d
RS
9429
9430 continue;
9431 }
9432 break;
6e0ef100
JC
9433
9434 case TRUNCATE:
9435 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9436 if the truncate does not affect the value. */
9437 if (code == LSHIFTRT
9438 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9439 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9440 && (INTVAL (XEXP (XEXP (varop, 0), 1))
b577a8ff
JL
9441 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9442 - GET_MODE_BITSIZE (GET_MODE (varop)))))
6e0ef100
JC
9443 {
9444 rtx varop_inner = XEXP (varop, 0);
9445
770ae6cc 9446 varop_inner
f1c6ba8b
RK
9447 = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9448 XEXP (varop_inner, 0),
9449 GEN_INT
9450 (count + INTVAL (XEXP (varop_inner, 1))));
9451 varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
6e0ef100
JC
9452 count = 0;
9453 continue;
9454 }
9455 break;
663522cb 9456
e9a25f70
JL
9457 default:
9458 break;
230d793d
RS
9459 }
9460
9461 break;
9462 }
9463
9464 /* We need to determine what mode to do the shift in. If the shift is
f6789c77
RK
9465 a right shift or ROTATE, we must always do it in the mode it was
9466 originally done in. Otherwise, we can do it in MODE, the widest mode
9467 encountered. The code we care about is that of the shift that will
9468 actually be done, not the shift that was originally requested. */
9469 shift_mode
9470 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9471 ? result_mode : mode);
230d793d
RS
9472
9473 /* We have now finished analyzing the shift. The result should be
9474 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9475 OUTER_OP is non-NIL, it is an operation that needs to be applied
9476 to the result of the shift. OUTER_CONST is the relevant constant,
9477 but we must turn off all bits turned off in the shift.
9478
9479 If we were passed a value for X, see if we can use any pieces of
9480 it. If not, make new rtx. */
9481
9482 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9483 && GET_CODE (XEXP (x, 1)) == CONST_INT
9484 && INTVAL (XEXP (x, 1)) == count)
9485 const_rtx = XEXP (x, 1);
9486 else
5f4f0e22 9487 const_rtx = GEN_INT (count);
230d793d
RS
9488
9489 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9490 && GET_MODE (XEXP (x, 0)) == shift_mode
9491 && SUBREG_REG (XEXP (x, 0)) == varop)
9492 varop = XEXP (x, 0);
9493 else if (GET_MODE (varop) != shift_mode)
9494 varop = gen_lowpart_for_combine (shift_mode, varop);
9495
0f41302f 9496 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
9497 if (GET_CODE (varop) == CLOBBER)
9498 return x ? x : varop;
9499
9500 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9501 if (new != 0)
9502 x = new;
9503 else
9504 {
9505 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
f1c6ba8b 9506 x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
230d793d
RS
9507
9508 SUBST (XEXP (x, 0), varop);
9509 SUBST (XEXP (x, 1), const_rtx);
9510 }
9511
224eeff2
RK
9512 /* If we have an outer operation and we just made a shift, it is
9513 possible that we could have simplified the shift were it not
9514 for the outer operation. So try to do the simplification
9515 recursively. */
9516
9517 if (outer_op != NIL && GET_CODE (x) == code
9518 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9519 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9520 INTVAL (XEXP (x, 1)));
9521
230d793d
RS
9522 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9523 turn off all the bits that the shift would have turned off. */
9524 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 9525 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d 9526 GET_MODE_MASK (result_mode) >> orig_count);
663522cb 9527
230d793d
RS
9528 /* Do the remainder of the processing in RESULT_MODE. */
9529 x = gen_lowpart_for_combine (result_mode, x);
9530
9531 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9532 operation. */
9533 if (complement_p)
f1c6ba8b 9534 x =simplify_gen_unary (NOT, result_mode, x, result_mode);
230d793d
RS
9535
9536 if (outer_op != NIL)
9537 {
5f4f0e22 9538 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7e4ce834 9539 outer_const = trunc_int_for_mode (outer_const, result_mode);
230d793d
RS
9540
9541 if (outer_op == AND)
5f4f0e22 9542 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
9543 else if (outer_op == SET)
9544 /* This means that we have determined that the result is
9545 equivalent to a constant. This should be rare. */
5f4f0e22 9546 x = GEN_INT (outer_const);
230d793d 9547 else if (GET_RTX_CLASS (outer_op) == '1')
f1c6ba8b 9548 x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
230d793d 9549 else
5f4f0e22 9550 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
9551 }
9552
9553 return x;
663522cb 9554}
230d793d
RS
9555\f
9556/* Like recog, but we receive the address of a pointer to a new pattern.
9557 We try to match the rtx that the pointer points to.
9558 If that fails, we may try to modify or replace the pattern,
9559 storing the replacement into the same pointer object.
9560
9561 Modifications include deletion or addition of CLOBBERs.
9562
9563 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9564 the CLOBBERs are placed.
9565
9566 The value is the final insn code from the pattern ultimately matched,
9567 or -1. */
9568
static int
recog_for_combine (pnewpat, insn, pnotes)
     rtx *pnewpat;
     rtx insn;
     rtx *pnotes;
{
  register rtx pat = *pnewpat;
  int insn_code_number;
  int num_clobbers_to_add = 0;
  int i;
  rtx notes = 0;
  rtx old_notes;

  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
     we use to indicate that something didn't match.  If we find such a
     thing, force rejection.  */
  if (GET_CODE (pat) == PARALLEL)
    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
	return -1;

  /* Remove the old notes prior to trying to recognize the new pattern.
     They are restored below; recog must not be confused by notes that
     describe the pre-combination insn.  */
  old_notes = REG_NOTES (insn);
  REG_NOTES (insn) = 0;

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog (pat, insn, &num_clobbers_to_add);

  /* If it isn't, there is the possibility that we previously had an insn
     that clobbered some register as a side effect, but the combined
     insn doesn't need to do that.  So try once more without the clobbers
     unless this represents an ASM insn.  */

  if (insn_code_number < 0 && ! check_asm_operands (pat)
      && GET_CODE (pat) == PARALLEL)
    {
      int pos;

      /* Compact the PARALLEL in place, dropping every CLOBBER.  */
      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
	  {
	    if (i != pos)
	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
	    pos++;
	  }

      SUBST_INT (XVECLEN (pat, 0), pos);

      /* A one-element PARALLEL is just its single element.  */
      if (pos == 1)
	pat = XVECEXP (pat, 0, 0);

      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
    }

  REG_NOTES (insn) = old_notes;

  /* If we had any clobbers to add, make a new pattern that contains
     them.  Then check to make sure that all of them are dead.  */
  if (num_clobbers_to_add)
    {
      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
				     rtvec_alloc (GET_CODE (pat) == PARALLEL
						  ? (XVECLEN (pat, 0)
						     + num_clobbers_to_add)
						  : num_clobbers_to_add + 1));

      /* Copy the existing body (or bodies) into the new PARALLEL.  */
      if (GET_CODE (pat) == PARALLEL)
	for (i = 0; i < XVECLEN (pat, 0); i++)
	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
      else
	XVECEXP (newpat, 0, 0) = pat;

      add_clobbers (newpat, insn_code_number);

      /* The added clobbers occupy the tail of the vector.  Reject the
	 combination if any clobbered hard register is still live, and
	 record a REG_UNUSED note for each clobbered register.  */
      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
	   i < XVECLEN (newpat, 0); i++)
	{
	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
	    return -1;
	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
	}
      pat = newpat;
    }

  *pnewpat = pat;
  *pnotes = notes;

  return insn_code_number;
}
9661\f
9662/* Like gen_lowpart but for use by combine. In combine it is not possible
9663 to create any new pseudoregs. However, it is safe to create
9664 invalid memory addresses, because combine will try to recognize
9665 them and all they will do is make the combine attempt fail.
9666
9667 If for some reason this cannot do its job, an rtx
9668 (clobber (const_int 0)) is returned.
9669 An insn containing that will not be recognized. */
9670
9671#undef gen_lowpart
9672
9673static rtx
9674gen_lowpart_for_combine (mode, x)
9675 enum machine_mode mode;
9676 register rtx x;
9677{
9678 rtx result;
9679
9680 if (GET_MODE (x) == mode)
9681 return x;
9682
eae957a8
RK
9683 /* We can only support MODE being wider than a word if X is a
9684 constant integer or has a mode the same size. */
9685
9686 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9687 && ! ((GET_MODE (x) == VOIDmode
9688 && (GET_CODE (x) == CONST_INT
9689 || GET_CODE (x) == CONST_DOUBLE))
9690 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
38a448ca 9691 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9692
9693 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9694 won't know what to do. So we will strip off the SUBREG here and
9695 process normally. */
9696 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9697 {
9698 x = SUBREG_REG (x);
9699 if (GET_MODE (x) == mode)
9700 return x;
9701 }
9702
9703 result = gen_lowpart_common (mode, x);
02188693 9704#ifdef CLASS_CANNOT_CHANGE_MODE
64bf47a2
RK
9705 if (result != 0
9706 && GET_CODE (result) == SUBREG
9707 && GET_CODE (SUBREG_REG (result)) == REG
9708 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
02188693
RH
9709 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (result),
9710 GET_MODE (SUBREG_REG (result))))
9711 REG_CHANGES_MODE (REGNO (SUBREG_REG (result))) = 1;
9712#endif
64bf47a2 9713
230d793d
RS
9714 if (result)
9715 return result;
9716
9717 if (GET_CODE (x) == MEM)
9718 {
9719 register int offset = 0;
9720 rtx new;
9721
9722 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9723 address. */
9724 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
38a448ca 9725 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9726
9727 /* If we want to refer to something bigger than the original memref,
9728 generate a perverse subreg instead. That will force a reload
9729 of the original memref X. */
9730 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
38a448ca 9731 return gen_rtx_SUBREG (mode, x, 0);
230d793d 9732
f76b9db2
ILT
9733 if (WORDS_BIG_ENDIAN)
9734 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9735 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
c5c76735 9736
f76b9db2
ILT
9737 if (BYTES_BIG_ENDIAN)
9738 {
9739 /* Adjust the address so that the address-after-the-data is
9740 unchanged. */
9741 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9742 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9743 }
f1ec5147
RK
9744
9745 return adjust_address_nv (x, mode, offset);
230d793d
RS
9746 }
9747
9748 /* If X is a comparison operator, rewrite it in a new mode. This
9749 probably won't match, but may allow further simplifications. */
9750 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
f1c6ba8b 9751 return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
230d793d
RS
9752
9753 /* If we couldn't simplify X any other way, just enclose it in a
9754 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 9755 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 9756 else
dfbe1b2f 9757 {
ddef6bc7 9758 int offset = 0;
e0e08ac2 9759 rtx res;
dfbe1b2f 9760
e0e08ac2
JH
9761 offset = subreg_lowpart_offset (mode, GET_MODE (x));
9762 res = simplify_gen_subreg (mode, x, GET_MODE (x), offset);
9763 if (res)
9764 return res;
9765 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
dfbe1b2f 9766 }
230d793d
RS
9767}
9768\f
230d793d
RS
9769/* These routines make binary and unary operations by first seeing if they
9770 fold; if not, a new expression is allocated. */
9771
static rtx
gen_binary (code, mode, op0, op1)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
{
  rtx result;
  rtx tem;

  /* Canonicalize commutative operations up front so the simplifiers
     see operands in the preferred order.  */
  if (GET_RTX_CLASS (code) == 'c'
      && swap_commutative_operands_p (op0, op1))
    tem = op0, op0 = op1, op1 = tem;

  if (GET_RTX_CLASS (code) == '<')
    {
      enum machine_mode op_mode = GET_MODE (op0);

      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
	 just (REL_OP X Y).  */
      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
	{
	  op1 = XEXP (op0, 1);
	  op0 = XEXP (op0, 0);
	  op_mode = GET_MODE (op0);
	}

      /* A VOIDmode first operand (e.g. a constant) contributes no mode;
	 fall back to the second operand's mode.  */
      if (op_mode == VOIDmode)
	op_mode = GET_MODE (op1);
      result = simplify_relational_operation (code, op_mode, op0, op1);
    }
  else
    result = simplify_binary_operation (code, mode, op0, op1);

  if (result)
    return result;

  /* Put complex operands first and constants second.
     NOTE(review): after the swap at the top of this function this test
     looks redundant for commutative codes whose operands were not
     rewritten above — presumably kept for safety; confirm before
     removing.  */
  if (GET_RTX_CLASS (code) == 'c'
      && swap_commutative_operands_p (op0, op1))
    return gen_rtx_fmt_ee (code, mode, op1, op0);

  /* If we are turning off bits already known off in OP0, we need not do
     an AND.  */
  else if (code == AND && GET_CODE (op1) == CONST_INT
	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	   && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
    return op0;

  return gen_rtx_fmt_ee (code, mode, op0, op1);
}
9822\f
9823/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9824 comparison code that will be tested.
9825
9826 The result is a possibly different comparison code to use. *POP0 and
9827 *POP1 may be updated.
9828
9829 It is possible that we might detect that a comparison is either always
9830 true or always false. However, we do not perform general constant
5089e22e 9831 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
9832 should have been detected earlier. Hence we ignore all such cases. */
9833
9834static enum rtx_code
9835simplify_comparison (code, pop0, pop1)
9836 enum rtx_code code;
9837 rtx *pop0;
9838 rtx *pop1;
9839{
9840 rtx op0 = *pop0;
9841 rtx op1 = *pop1;
9842 rtx tem, tem1;
9843 int i;
9844 enum machine_mode mode, tmode;
9845
9846 /* Try a few ways of applying the same transformation to both operands. */
9847 while (1)
9848 {
3a19aabc
RK
9849#ifndef WORD_REGISTER_OPERATIONS
9850 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9851 so check specially. */
9852 if (code != GTU && code != GEU && code != LTU && code != LEU
9853 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9854 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9855 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9856 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9857 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9858 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 9859 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
3a19aabc
RK
9860 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9861 && GET_CODE (XEXP (op1, 1)) == CONST_INT
9862 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9863 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9864 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9865 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9866 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9867 && (INTVAL (XEXP (op0, 1))
9868 == (GET_MODE_BITSIZE (GET_MODE (op0))
9869 - (GET_MODE_BITSIZE
9870 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9871 {
9872 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9873 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9874 }
9875#endif
9876
230d793d
RS
9877 /* If both operands are the same constant shift, see if we can ignore the
9878 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 9879 this shift are known to be zero for both inputs and if the type of
230d793d 9880 comparison is compatible with the shift. */
67232b23
RK
9881 if (GET_CODE (op0) == GET_CODE (op1)
9882 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9883 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 9884 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
67232b23
RK
9885 && (code != GT && code != LT && code != GE && code != LE))
9886 || (GET_CODE (op0) == ASHIFTRT
9887 && (code != GTU && code != LTU
99dc5306 9888 && code != GEU && code != LEU)))
67232b23
RK
9889 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9890 && INTVAL (XEXP (op0, 1)) >= 0
9891 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9892 && XEXP (op0, 1) == XEXP (op1, 1))
230d793d
RS
9893 {
9894 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 9895 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9896 int shift_count = INTVAL (XEXP (op0, 1));
9897
9898 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9899 mask &= (mask >> shift_count) << shift_count;
45620ed4 9900 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
9901 mask = (mask & (mask << shift_count)) >> shift_count;
9902
663522cb
KH
9903 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
9904 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
230d793d
RS
9905 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9906 else
9907 break;
9908 }
9909
9910 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9911 SUBREGs are of the same mode, and, in both cases, the AND would
9912 be redundant if the comparison was done in the narrower mode,
9913 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
9914 and the operand's possibly nonzero bits are 0xffffff01; in that case
9915 if we only care about QImode, we don't need the AND). This case
9916 occurs if the output mode of an scc insn is not SImode and
7e4dc511
RK
9917 STORE_FLAG_VALUE == 1 (e.g., the 386).
9918
9919 Similarly, check for a case where the AND's are ZERO_EXTEND
9920 operations from some narrower mode even though a SUBREG is not
9921 present. */
230d793d 9922
663522cb
KH
9923 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9924 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9925 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
230d793d 9926 {
7e4dc511
RK
9927 rtx inner_op0 = XEXP (op0, 0);
9928 rtx inner_op1 = XEXP (op1, 0);
9929 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9930 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9931 int changed = 0;
663522cb 9932
7e4dc511
RK
9933 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9934 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9935 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9936 && (GET_MODE (SUBREG_REG (inner_op0))
9937 == GET_MODE (SUBREG_REG (inner_op1)))
729a2bc6 9938 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
7e4dc511 9939 <= HOST_BITS_PER_WIDE_INT)
01c82bbb 9940 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
729a2bc6 9941 GET_MODE (SUBREG_REG (inner_op0)))))
01c82bbb
RK
9942 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9943 GET_MODE (SUBREG_REG (inner_op1))))))
7e4dc511
RK
9944 {
9945 op0 = SUBREG_REG (inner_op0);
9946 op1 = SUBREG_REG (inner_op1);
9947
9948 /* The resulting comparison is always unsigned since we masked
0f41302f 9949 off the original sign bit. */
7e4dc511
RK
9950 code = unsigned_condition (code);
9951
9952 changed = 1;
9953 }
230d793d 9954
7e4dc511
RK
9955 else if (c0 == c1)
9956 for (tmode = GET_CLASS_NARROWEST_MODE
9957 (GET_MODE_CLASS (GET_MODE (op0)));
9958 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
e51712db 9959 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
7e4dc511
RK
9960 {
9961 op0 = gen_lowpart_for_combine (tmode, inner_op0);
9962 op1 = gen_lowpart_for_combine (tmode, inner_op1);
66415c8b 9963 code = unsigned_condition (code);
7e4dc511
RK
9964 changed = 1;
9965 break;
9966 }
9967
9968 if (! changed)
9969 break;
230d793d 9970 }
3a19aabc 9971
ad25ba17
RK
9972 /* If both operands are NOT, we can strip off the outer operation
9973 and adjust the comparison code for swapped operands; similarly for
9974 NEG, except that this must be an equality comparison. */
9975 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9976 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9977 && (code == EQ || code == NE)))
9978 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 9979
230d793d
RS
9980 else
9981 break;
9982 }
663522cb 9983
230d793d 9984 /* If the first operand is a constant, swap the operands and adjust the
3aceff0d
RK
9985 comparison code appropriately, but don't do this if the second operand
9986 is already a constant integer. */
8c9864f3 9987 if (swap_commutative_operands_p (op0, op1))
230d793d
RS
9988 {
9989 tem = op0, op0 = op1, op1 = tem;
9990 code = swap_condition (code);
9991 }
9992
9993 /* We now enter a loop during which we will try to simplify the comparison.
9994 For the most part, we only are concerned with comparisons with zero,
9995 but some things may really be comparisons with zero but not start
9996 out looking that way. */
9997
9998 while (GET_CODE (op1) == CONST_INT)
9999 {
10000 enum machine_mode mode = GET_MODE (op0);
770ae6cc 10001 unsigned int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 10002 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
10003 int equality_comparison_p;
10004 int sign_bit_comparison_p;
10005 int unsigned_comparison_p;
5f4f0e22 10006 HOST_WIDE_INT const_op;
230d793d
RS
10007
10008 /* We only want to handle integral modes. This catches VOIDmode,
10009 CCmode, and the floating-point modes. An exception is that we
10010 can handle VOIDmode if OP0 is a COMPARE or a comparison
10011 operation. */
10012
10013 if (GET_MODE_CLASS (mode) != MODE_INT
10014 && ! (mode == VOIDmode
10015 && (GET_CODE (op0) == COMPARE
10016 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10017 break;
10018
10019 /* Get the constant we are comparing against and turn off all bits
10020 not on in our mode. */
3c094e22 10021 const_op = trunc_int_for_mode (INTVAL (op1), mode);
b4fbaca7 10022 op1 = GEN_INT (const_op);
230d793d
RS
10023
10024 /* If we are comparing against a constant power of two and the value
951553af 10025 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
10026 `and'ed with that bit), we can replace this with a comparison
10027 with zero. */
10028 if (const_op
10029 && (code == EQ || code == NE || code == GE || code == GEU
10030 || code == LT || code == LTU)
5f4f0e22 10031 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10032 && exact_log2 (const_op) >= 0
e51712db 10033 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
230d793d
RS
10034 {
10035 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10036 op1 = const0_rtx, const_op = 0;
10037 }
10038
d0ab8cd3
RK
10039 /* Similarly, if we are comparing a value known to be either -1 or
10040 0 with -1, change it to the opposite comparison against zero. */
10041
10042 if (const_op == -1
10043 && (code == EQ || code == NE || code == GT || code == LE
10044 || code == GEU || code == LTU)
10045 && num_sign_bit_copies (op0, mode) == mode_width)
10046 {
10047 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10048 op1 = const0_rtx, const_op = 0;
10049 }
10050
230d793d 10051 /* Do some canonicalizations based on the comparison code. We prefer
663522cb 10052 comparisons against zero and then prefer equality comparisons.
4803a34a 10053 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
10054
10055 switch (code)
10056 {
10057 case LT:
4803a34a
RK
10058 /* < C is equivalent to <= (C - 1) */
10059 if (const_op > 0)
230d793d 10060 {
4803a34a 10061 const_op -= 1;
5f4f0e22 10062 op1 = GEN_INT (const_op);
230d793d
RS
10063 code = LE;
10064 /* ... fall through to LE case below. */
10065 }
10066 else
10067 break;
10068
10069 case LE:
4803a34a
RK
10070 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
10071 if (const_op < 0)
10072 {
10073 const_op += 1;
5f4f0e22 10074 op1 = GEN_INT (const_op);
4803a34a
RK
10075 code = LT;
10076 }
230d793d
RS
10077
10078 /* If we are doing a <= 0 comparison on a value known to have
10079 a zero sign bit, we can replace this with == 0. */
10080 else if (const_op == 0
5f4f0e22 10081 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10082 && (nonzero_bits (op0, mode)
5f4f0e22 10083 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
10084 code = EQ;
10085 break;
10086
10087 case GE:
0f41302f 10088 /* >= C is equivalent to > (C - 1). */
4803a34a 10089 if (const_op > 0)
230d793d 10090 {
4803a34a 10091 const_op -= 1;
5f4f0e22 10092 op1 = GEN_INT (const_op);
230d793d
RS
10093 code = GT;
10094 /* ... fall through to GT below. */
10095 }
10096 else
10097 break;
10098
10099 case GT:
663522cb 10100 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
4803a34a
RK
10101 if (const_op < 0)
10102 {
10103 const_op += 1;
5f4f0e22 10104 op1 = GEN_INT (const_op);
4803a34a
RK
10105 code = GE;
10106 }
230d793d
RS
10107
10108 /* If we are doing a > 0 comparison on a value known to have
10109 a zero sign bit, we can replace this with != 0. */
10110 else if (const_op == 0
5f4f0e22 10111 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10112 && (nonzero_bits (op0, mode)
5f4f0e22 10113 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
10114 code = NE;
10115 break;
10116
230d793d 10117 case LTU:
4803a34a
RK
10118 /* < C is equivalent to <= (C - 1). */
10119 if (const_op > 0)
10120 {
10121 const_op -= 1;
5f4f0e22 10122 op1 = GEN_INT (const_op);
4803a34a 10123 code = LEU;
0f41302f 10124 /* ... fall through ... */
4803a34a 10125 }
d0ab8cd3
RK
10126
10127 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
f77aada2
JW
10128 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10129 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
10130 {
10131 const_op = 0, op1 = const0_rtx;
10132 code = GE;
10133 break;
10134 }
4803a34a
RK
10135 else
10136 break;
230d793d
RS
10137
10138 case LEU:
10139 /* unsigned <= 0 is equivalent to == 0 */
10140 if (const_op == 0)
10141 code = EQ;
d0ab8cd3 10142
0f41302f 10143 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
f77aada2
JW
10144 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10145 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
10146 {
10147 const_op = 0, op1 = const0_rtx;
10148 code = GE;
10149 }
230d793d
RS
10150 break;
10151
4803a34a
RK
10152 case GEU:
10153 /* >= C is equivalent to < (C - 1). */
10154 if (const_op > 1)
10155 {
10156 const_op -= 1;
5f4f0e22 10157 op1 = GEN_INT (const_op);
4803a34a 10158 code = GTU;
0f41302f 10159 /* ... fall through ... */
4803a34a 10160 }
d0ab8cd3
RK
10161
10162 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
f77aada2
JW
10163 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10164 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
10165 {
10166 const_op = 0, op1 = const0_rtx;
10167 code = LT;
8b2e69e1 10168 break;
d0ab8cd3 10169 }
4803a34a
RK
10170 else
10171 break;
10172
230d793d
RS
10173 case GTU:
10174 /* unsigned > 0 is equivalent to != 0 */
10175 if (const_op == 0)
10176 code = NE;
d0ab8cd3
RK
10177
10178 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
f77aada2
JW
10179 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10180 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
10181 {
10182 const_op = 0, op1 = const0_rtx;
10183 code = LT;
10184 }
230d793d 10185 break;
e9a25f70
JL
10186
10187 default:
10188 break;
230d793d
RS
10189 }
10190
10191 /* Compute some predicates to simplify code below. */
10192
10193 equality_comparison_p = (code == EQ || code == NE);
10194 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10195 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
d5010e66 10196 || code == GEU);
230d793d 10197
6139ff20
RK
10198 /* If this is a sign bit comparison and we can do arithmetic in
10199 MODE, say that we will only be needing the sign bit of OP0. */
10200 if (sign_bit_comparison_p
10201 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10202 op0 = force_to_mode (op0, mode,
10203 ((HOST_WIDE_INT) 1
10204 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 10205 NULL_RTX, 0);
6139ff20 10206
230d793d
RS
10207 /* Now try cases based on the opcode of OP0. If none of the cases
10208 does a "continue", we exit this loop immediately after the
10209 switch. */
10210
10211 switch (GET_CODE (op0))
10212 {
10213 case ZERO_EXTRACT:
10214 /* If we are extracting a single bit from a variable position in
10215 a constant that has only a single bit set and are comparing it
663522cb 10216 with zero, we can convert this into an equality comparison
d7cd794f 10217 between the position and the location of the single bit. */
230d793d 10218
230d793d
RS
10219 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
10220 && XEXP (op0, 1) == const1_rtx
10221 && equality_comparison_p && const_op == 0
d7cd794f 10222 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
230d793d 10223 {
f76b9db2 10224 if (BITS_BIG_ENDIAN)
0d8e55d8 10225 {
d7cd794f 10226#ifdef HAVE_extzv
a995e389 10227 mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
0d8e55d8
JL
10228 if (mode == VOIDmode)
10229 mode = word_mode;
10230 i = (GET_MODE_BITSIZE (mode) - 1 - i);
d7cd794f 10231#else
663522cb 10232 i = BITS_PER_WORD - 1 - i;
230d793d 10233#endif
0d8e55d8 10234 }
230d793d
RS
10235
10236 op0 = XEXP (op0, 2);
5f4f0e22 10237 op1 = GEN_INT (i);
230d793d
RS
10238 const_op = i;
10239
10240 /* Result is nonzero iff shift count is equal to I. */
10241 code = reverse_condition (code);
10242 continue;
10243 }
230d793d 10244
0f41302f 10245 /* ... fall through ... */
230d793d
RS
10246
10247 case SIGN_EXTRACT:
10248 tem = expand_compound_operation (op0);
10249 if (tem != op0)
10250 {
10251 op0 = tem;
10252 continue;
10253 }
10254 break;
10255
10256 case NOT:
10257 /* If testing for equality, we can take the NOT of the constant. */
10258 if (equality_comparison_p
10259 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10260 {
10261 op0 = XEXP (op0, 0);
10262 op1 = tem;
10263 continue;
10264 }
10265
10266 /* If just looking at the sign bit, reverse the sense of the
10267 comparison. */
10268 if (sign_bit_comparison_p)
10269 {
10270 op0 = XEXP (op0, 0);
10271 code = (code == GE ? LT : GE);
10272 continue;
10273 }
10274 break;
10275
10276 case NEG:
10277 /* If testing for equality, we can take the NEG of the constant. */
10278 if (equality_comparison_p
10279 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10280 {
10281 op0 = XEXP (op0, 0);
10282 op1 = tem;
10283 continue;
10284 }
10285
10286 /* The remaining cases only apply to comparisons with zero. */
10287 if (const_op != 0)
10288 break;
10289
10290 /* When X is ABS or is known positive,
10291 (neg X) is < 0 if and only if X != 0. */
10292
10293 if (sign_bit_comparison_p
10294 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 10295 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10296 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10297 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
10298 {
10299 op0 = XEXP (op0, 0);
10300 code = (code == LT ? NE : EQ);
10301 continue;
10302 }
10303
3bed8141 10304 /* If we have NEG of something whose two high-order bits are the
0f41302f 10305 same, we know that "(-a) < 0" is equivalent to "a > 0". */
3bed8141 10306 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
10307 {
10308 op0 = XEXP (op0, 0);
10309 code = swap_condition (code);
10310 continue;
10311 }
10312 break;
10313
10314 case ROTATE:
10315 /* If we are testing equality and our count is a constant, we
10316 can perform the inverse operation on our RHS. */
10317 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10318 && (tem = simplify_binary_operation (ROTATERT, mode,
10319 op1, XEXP (op0, 1))) != 0)
10320 {
10321 op0 = XEXP (op0, 0);
10322 op1 = tem;
10323 continue;
10324 }
10325
10326 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10327 a particular bit. Convert it to an AND of a constant of that
10328 bit. This will be converted into a ZERO_EXTRACT. */
10329 if (const_op == 0 && sign_bit_comparison_p
10330 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10331 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10332 {
5f4f0e22
CH
10333 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10334 ((HOST_WIDE_INT) 1
10335 << (mode_width - 1
10336 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10337 code = (code == LT ? NE : EQ);
10338 continue;
10339 }
10340
663522cb 10341 /* Fall through. */
230d793d
RS
10342
10343 case ABS:
10344 /* ABS is ignorable inside an equality comparison with zero. */
10345 if (const_op == 0 && equality_comparison_p)
10346 {
10347 op0 = XEXP (op0, 0);
10348 continue;
10349 }
10350 break;
230d793d
RS
10351
10352 case SIGN_EXTEND:
10353 /* Can simplify (compare (zero/sign_extend FOO) CONST)
663522cb 10354 to (compare FOO CONST) if CONST fits in FOO's mode and we
230d793d
RS
10355 are either testing inequality or have an unsigned comparison
10356 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
10357 if (! unsigned_comparison_p
10358 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
10359 <= HOST_BITS_PER_WIDE_INT)
10360 && ((unsigned HOST_WIDE_INT) const_op
e51712db 10361 < (((unsigned HOST_WIDE_INT) 1
5f4f0e22 10362 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
10363 {
10364 op0 = XEXP (op0, 0);
10365 continue;
10366 }
10367 break;
10368
10369 case SUBREG:
a687e897 10370 /* Check for the case where we are comparing A - C1 with C2,
abc95ed3 10371 both constants are smaller than 1/2 the maximum positive
a687e897
RK
10372 value in MODE, and the comparison is equality or unsigned.
10373 In that case, if A is either zero-extended to MODE or has
10374 sufficient sign bits so that the high-order bit in MODE
10375 is a copy of the sign in the inner mode, we can prove that it is
10376 safe to do the operation in the wider mode. This simplifies
10377 many range checks. */
10378
10379 if (mode_width <= HOST_BITS_PER_WIDE_INT
10380 && subreg_lowpart_p (op0)
10381 && GET_CODE (SUBREG_REG (op0)) == PLUS
10382 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10383 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
663522cb
KH
10384 && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10385 < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
adb7a1cb 10386 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
10387 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10388 GET_MODE (SUBREG_REG (op0)))
663522cb 10389 & ~GET_MODE_MASK (mode))
a687e897
RK
10390 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10391 GET_MODE (SUBREG_REG (op0)))
10392 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10393 - GET_MODE_BITSIZE (mode)))))
10394 {
10395 op0 = SUBREG_REG (op0);
10396 continue;
10397 }
10398
fe0cf571
RK
10399 /* If the inner mode is narrower and we are extracting the low part,
10400 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10401 if (subreg_lowpart_p (op0)
89f1c7f2
RS
10402 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10403 /* Fall through */ ;
10404 else
230d793d
RS
10405 break;
10406
0f41302f 10407 /* ... fall through ... */
230d793d
RS
10408
10409 case ZERO_EXTEND:
10410 if ((unsigned_comparison_p || equality_comparison_p)
10411 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
10412 <= HOST_BITS_PER_WIDE_INT)
10413 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
10414 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10415 {
10416 op0 = XEXP (op0, 0);
10417 continue;
10418 }
10419 break;
10420
10421 case PLUS:
20fdd649 10422 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 10423 this for equality comparisons due to pathological cases involving
230d793d 10424 overflows. */
20fdd649
RK
10425 if (equality_comparison_p
10426 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10427 op1, XEXP (op0, 1))))
230d793d
RS
10428 {
10429 op0 = XEXP (op0, 0);
10430 op1 = tem;
10431 continue;
10432 }
10433
10434 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10435 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10436 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10437 {
10438 op0 = XEXP (XEXP (op0, 0), 0);
10439 code = (code == LT ? EQ : NE);
10440 continue;
10441 }
10442 break;
10443
10444 case MINUS:
65945ec1
HPN
10445 /* We used to optimize signed comparisons against zero, but that
10446 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10447 arrive here as equality comparisons, or (GEU, LTU) are
10448 optimized away. No need to special-case them. */
0bd4b461 10449
20fdd649
RK
10450 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10451 (eq B (minus A C)), whichever simplifies. We can only do
10452 this for equality comparisons due to pathological cases involving
10453 overflows. */
10454 if (equality_comparison_p
10455 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10456 XEXP (op0, 1), op1)))
10457 {
10458 op0 = XEXP (op0, 0);
10459 op1 = tem;
10460 continue;
10461 }
10462
10463 if (equality_comparison_p
10464 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10465 XEXP (op0, 0), op1)))
10466 {
10467 op0 = XEXP (op0, 1);
10468 op1 = tem;
10469 continue;
10470 }
10471
230d793d
RS
10472 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10473 of bits in X minus 1, is one iff X > 0. */
10474 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10475 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10476 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10477 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10478 {
10479 op0 = XEXP (op0, 1);
10480 code = (code == GE ? LE : GT);
10481 continue;
10482 }
10483 break;
10484
10485 case XOR:
10486 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10487 if C is zero or B is a constant. */
10488 if (equality_comparison_p
10489 && 0 != (tem = simplify_binary_operation (XOR, mode,
10490 XEXP (op0, 1), op1)))
10491 {
10492 op0 = XEXP (op0, 0);
10493 op1 = tem;
10494 continue;
10495 }
10496 break;
10497
10498 case EQ: case NE:
69bc0a1f
JH
10499 case UNEQ: case LTGT:
10500 case LT: case LTU: case UNLT: case LE: case LEU: case UNLE:
10501 case GT: case GTU: case UNGT: case GE: case GEU: case UNGE:
10502 case UNORDERED: case ORDERED:
230d793d
RS
10503 /* We can't do anything if OP0 is a condition code value, rather
10504 than an actual data value. */
10505 if (const_op != 0
10506#ifdef HAVE_cc0
10507 || XEXP (op0, 0) == cc0_rtx
10508#endif
10509 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10510 break;
10511
10512 /* Get the two operands being compared. */
10513 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10514 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10515 else
10516 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10517
10518 /* Check for the cases where we simply want the result of the
10519 earlier test or the opposite of that result. */
9a915772 10520 if (code == NE || code == EQ
5f4f0e22 10521 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 10522 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 10523 && (STORE_FLAG_VALUE
5f4f0e22
CH
10524 & (((HOST_WIDE_INT) 1
10525 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
aa6683f7 10526 && (code == LT || code == GE)))
230d793d 10527 {
aa6683f7
GK
10528 enum rtx_code new_code;
10529 if (code == LT || code == NE)
10530 new_code = GET_CODE (op0);
10531 else
10532 new_code = combine_reversed_comparison_code (op0);
10533
10534 if (new_code != UNKNOWN)
9a915772 10535 {
aa6683f7
GK
10536 code = new_code;
10537 op0 = tem;
10538 op1 = tem1;
9a915772
JH
10539 continue;
10540 }
230d793d
RS
10541 }
10542 break;
10543
10544 case IOR:
10545 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10546 iff X <= 0. */
10547 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10548 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10549 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10550 {
10551 op0 = XEXP (op0, 1);
10552 code = (code == GE ? GT : LE);
10553 continue;
10554 }
10555 break;
10556
10557 case AND:
10558 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10559 will be converted to a ZERO_EXTRACT later. */
10560 if (const_op == 0 && equality_comparison_p
45620ed4 10561 && GET_CODE (XEXP (op0, 0)) == ASHIFT
230d793d
RS
10562 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10563 {
10564 op0 = simplify_and_const_int
f1c6ba8b
RK
10565 (op0, mode, gen_rtx_LSHIFTRT (mode,
10566 XEXP (op0, 1),
10567 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 10568 (HOST_WIDE_INT) 1);
230d793d
RS
10569 continue;
10570 }
10571
10572 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10573 zero and X is a comparison and C1 and C2 describe only bits set
10574 in STORE_FLAG_VALUE, we can compare with X. */
10575 if (const_op == 0 && equality_comparison_p
5f4f0e22 10576 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
10577 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10578 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10579 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10580 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 10581 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
10582 {
10583 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10584 << INTVAL (XEXP (XEXP (op0, 0), 1)));
663522cb 10585 if ((~STORE_FLAG_VALUE & mask) == 0
230d793d
RS
10586 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10587 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10588 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10589 {
10590 op0 = XEXP (XEXP (op0, 0), 0);
10591 continue;
10592 }
10593 }
10594
10595 /* If we are doing an equality comparison of an AND of a bit equal
10596 to the sign bit, replace this with a LT or GE comparison of
10597 the underlying value. */
10598 if (equality_comparison_p
10599 && const_op == 0
10600 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10601 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10602 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
e51712db 10603 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
10604 {
10605 op0 = XEXP (op0, 0);
10606 code = (code == EQ ? GE : LT);
10607 continue;
10608 }
10609
10610 /* If this AND operation is really a ZERO_EXTEND from a narrower
10611 mode, the constant fits within that mode, and this is either an
10612 equality or unsigned comparison, try to do this comparison in
10613 the narrower mode. */
10614 if ((equality_comparison_p || unsigned_comparison_p)
10615 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10616 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10617 & GET_MODE_MASK (mode))
10618 + 1)) >= 0
10619 && const_op >> i == 0
10620 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10621 {
10622 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10623 continue;
10624 }
e5e809f4
JL
10625
10626 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10627 in both M1 and M2 and the SUBREG is either paradoxical or
10628 represents the low part, permute the SUBREG and the AND and
10629 try again. */
10630 if (GET_CODE (XEXP (op0, 0)) == SUBREG
c5c76735 10631 && (0
9ec36da5 10632#ifdef WORD_REGISTER_OPERATIONS
c5c76735
JL
10633 || ((mode_width
10634 > (GET_MODE_BITSIZE
10635 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10636 && mode_width <= BITS_PER_WORD)
9ec36da5 10637#endif
c5c76735
JL
10638 || ((mode_width
10639 <= (GET_MODE_BITSIZE
10640 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10641 && subreg_lowpart_p (XEXP (op0, 0))))
adc05e6c
JL
10642#ifndef WORD_REGISTER_OPERATIONS
10643 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10644 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10645 As originally written the upper bits have a defined value
10646 due to the AND operation. However, if we commute the AND
10647 inside the SUBREG then they no longer have defined values
10648 and the meaning of the code has been changed. */
10649 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10650 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10651#endif
e5e809f4
JL
10652 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10653 && mode_width <= HOST_BITS_PER_WIDE_INT
10654 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10655 <= HOST_BITS_PER_WIDE_INT)
663522cb
KH
10656 && (INTVAL (XEXP (op0, 1)) & ~mask) == 0
10657 && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
9ec36da5 10658 & INTVAL (XEXP (op0, 1)))
e51712db
KG
10659 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10660 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
9ec36da5 10661 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
663522cb 10662
e5e809f4
JL
10663 {
10664 op0
10665 = gen_lowpart_for_combine
10666 (mode,
10667 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10668 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10669 continue;
10670 }
10671
9f8e169e
RH
10672 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10673 (eq (and (lshiftrt X) 1) 0). */
10674 if (const_op == 0 && equality_comparison_p
10675 && XEXP (op0, 1) == const1_rtx
10676 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10677 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT)
10678 {
10679 op0 = simplify_and_const_int
f1c6ba8b
RK
10680 (op0, mode,
10681 gen_rtx_LSHIFTRT (mode, XEXP (XEXP (XEXP (op0, 0), 0), 0),
10682 XEXP (XEXP (op0, 0), 1)),
9f8e169e
RH
10683 (HOST_WIDE_INT) 1);
10684 code = (code == NE ? EQ : NE);
10685 continue;
10686 }
230d793d
RS
10687 break;
10688
10689 case ASHIFT:
45620ed4 10690 /* If we have (compare (ashift FOO N) (const_int C)) and
230d793d 10691 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 10692 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
10693 shifted right N bits so long as the low-order N bits of C are
10694 zero. */
10695 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10696 && INTVAL (XEXP (op0, 1)) >= 0
10697 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
10698 < HOST_BITS_PER_WIDE_INT)
10699 && ((const_op
34785d05 10700 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 10701 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10702 && (nonzero_bits (XEXP (op0, 0), mode)
663522cb
KH
10703 & ~(mask >> (INTVAL (XEXP (op0, 1))
10704 + ! equality_comparison_p))) == 0)
230d793d 10705 {
7ce787fe
NC
10706 /* We must perform a logical shift, not an arithmetic one,
10707 as we want the top N bits of C to be zero. */
aaaec114 10708 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
663522cb 10709
7ce787fe 10710 temp >>= INTVAL (XEXP (op0, 1));
aaaec114 10711 op1 = GEN_INT (trunc_int_for_mode (temp, mode));
230d793d
RS
10712 op0 = XEXP (op0, 0);
10713 continue;
10714 }
10715
dfbe1b2f 10716 /* If we are doing a sign bit comparison, it means we are testing
230d793d 10717 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 10718 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10719 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10720 {
5f4f0e22
CH
10721 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10722 ((HOST_WIDE_INT) 1
10723 << (mode_width - 1
10724 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10725 code = (code == LT ? NE : EQ);
10726 continue;
10727 }
dfbe1b2f
RK
10728
10729 /* If this an equality comparison with zero and we are shifting
10730 the low bit to the sign bit, we can convert this to an AND of the
10731 low-order bit. */
10732 if (const_op == 0 && equality_comparison_p
10733 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10734 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10735 {
5f4f0e22
CH
10736 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10737 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
10738 continue;
10739 }
230d793d
RS
10740 break;
10741
10742 case ASHIFTRT:
d0ab8cd3
RK
10743 /* If this is an equality comparison with zero, we can do this
10744 as a logical shift, which might be much simpler. */
10745 if (equality_comparison_p && const_op == 0
10746 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10747 {
10748 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10749 XEXP (op0, 0),
10750 INTVAL (XEXP (op0, 1)));
10751 continue;
10752 }
10753
230d793d
RS
10754 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10755 do the comparison in a narrower mode. */
10756 if (! unsigned_comparison_p
10757 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10758 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10759 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10760 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 10761 MODE_INT, 1)) != BLKmode
5f4f0e22 10762 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
663522cb 10763 || ((unsigned HOST_WIDE_INT) -const_op
5f4f0e22 10764 <= GET_MODE_MASK (tmode))))
230d793d
RS
10765 {
10766 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10767 continue;
10768 }
10769
14a774a9
RK
10770 /* Likewise if OP0 is a PLUS of a sign extension with a
10771 constant, which is usually represented with the PLUS
10772 between the shifts. */
10773 if (! unsigned_comparison_p
10774 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10775 && GET_CODE (XEXP (op0, 0)) == PLUS
10776 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10777 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10778 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10779 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10780 MODE_INT, 1)) != BLKmode
10781 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
663522cb 10782 || ((unsigned HOST_WIDE_INT) -const_op
14a774a9
RK
10783 <= GET_MODE_MASK (tmode))))
10784 {
10785 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10786 rtx add_const = XEXP (XEXP (op0, 0), 1);
10787 rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
10788 XEXP (op0, 1));
10789
10790 op0 = gen_binary (PLUS, tmode,
10791 gen_lowpart_for_combine (tmode, inner),
10792 new_const);
10793 continue;
10794 }
10795
0f41302f 10796 /* ... fall through ... */
230d793d
RS
10797 case LSHIFTRT:
10798 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 10799 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
10800 by comparing FOO with C shifted left N bits so long as no
10801 overflow occurs. */
10802 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10803 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
10804 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10805 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10806 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10807 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
10808 && (const_op == 0
10809 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10810 < mode_width)))
10811 {
10812 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 10813 op1 = GEN_INT (const_op);
230d793d
RS
10814 op0 = XEXP (op0, 0);
10815 continue;
10816 }
10817
10818 /* If we are using this shift to extract just the sign bit, we
10819 can replace this with an LT or GE comparison. */
10820 if (const_op == 0
10821 && (equality_comparison_p || sign_bit_comparison_p)
10822 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10823 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10824 {
10825 op0 = XEXP (op0, 0);
10826 code = (code == NE || code == GT ? LT : GE);
10827 continue;
10828 }
10829 break;
663522cb 10830
e9a25f70
JL
10831 default:
10832 break;
230d793d
RS
10833 }
10834
10835 break;
10836 }
10837
10838 /* Now make any compound operations involved in this comparison. Then,
76d31c63 10839 check for an outmost SUBREG on OP0 that is not doing anything or is
230d793d
RS
10840 paradoxical. The latter case can only occur when it is known that the
10841 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
10842 We can never remove a SUBREG for a non-equality comparison because the
10843 sign bit is in a different place in the underlying object. */
10844
10845 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10846 op1 = make_compound_operation (op1, SET);
10847
10848 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10849 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10850 && (code == NE || code == EQ)
10851 && ((GET_MODE_SIZE (GET_MODE (op0))
10852 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10853 {
10854 op0 = SUBREG_REG (op0);
10855 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10856 }
10857
10858 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10859 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10860 && (code == NE || code == EQ)
ac49a949
RS
10861 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10862 <= HOST_BITS_PER_WIDE_INT)
951553af 10863 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
663522cb 10864 & ~GET_MODE_MASK (GET_MODE (op0))) == 0
230d793d
RS
10865 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10866 op1),
951553af 10867 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
663522cb 10868 & ~GET_MODE_MASK (GET_MODE (op0))) == 0))
230d793d
RS
10869 op0 = SUBREG_REG (op0), op1 = tem;
10870
10871 /* We now do the opposite procedure: Some machines don't have compare
10872 insns in all modes. If OP0's mode is an integer mode smaller than a
10873 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
10874 mode for which we can do the compare. There are a number of cases in
10875 which we can use the wider mode. */
230d793d
RS
10876
10877 mode = GET_MODE (op0);
10878 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10879 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10880 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10881 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
10882 (tmode != VOIDmode
10883 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 10884 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 10885 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 10886 {
951553af 10887 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
10888 narrower mode and this is an equality or unsigned comparison,
10889 we can use the wider mode. Similarly for sign-extended
7e4dc511 10890 values, in which case it is true for all comparisons. */
a687e897
RK
10891 if (((code == EQ || code == NE
10892 || code == GEU || code == GTU || code == LEU || code == LTU)
663522cb
KH
10893 && (nonzero_bits (op0, tmode) & ~GET_MODE_MASK (mode)) == 0
10894 && (nonzero_bits (op1, tmode) & ~GET_MODE_MASK (mode)) == 0)
7e4dc511
RK
10895 || ((num_sign_bit_copies (op0, tmode)
10896 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 10897 && (num_sign_bit_copies (op1, tmode)
58744483 10898 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897 10899 {
14a774a9
RK
10900 /* If OP0 is an AND and we don't have an AND in MODE either,
10901 make a new AND in the proper mode. */
10902 if (GET_CODE (op0) == AND
10903 && (add_optab->handlers[(int) mode].insn_code
10904 == CODE_FOR_nothing))
10905 op0 = gen_binary (AND, tmode,
10906 gen_lowpart_for_combine (tmode,
10907 XEXP (op0, 0)),
10908 gen_lowpart_for_combine (tmode,
10909 XEXP (op0, 1)));
10910
a687e897
RK
10911 op0 = gen_lowpart_for_combine (tmode, op0);
10912 op1 = gen_lowpart_for_combine (tmode, op1);
10913 break;
10914 }
230d793d 10915
a687e897
RK
10916 /* If this is a test for negative, we can make an explicit
10917 test of the sign bit. */
10918
10919 if (op1 == const0_rtx && (code == LT || code == GE)
10920 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 10921 {
a687e897
RK
10922 op0 = gen_binary (AND, tmode,
10923 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
10924 GEN_INT ((HOST_WIDE_INT) 1
10925 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 10926 code = (code == LT) ? NE : EQ;
a687e897 10927 break;
230d793d 10928 }
230d793d
RS
10929 }
10930
b7a775b2
RK
10931#ifdef CANONICALIZE_COMPARISON
10932 /* If this machine only supports a subset of valid comparisons, see if we
10933 can convert an unsupported one into a supported one. */
10934 CANONICALIZE_COMPARISON (code, op0, op1);
10935#endif
10936
230d793d
RS
10937 *pop0 = op0;
10938 *pop1 = op1;
10939
10940 return code;
10941}
10942\f
9a915772
JH
10943/* Like jump.c' reversed_comparison_code, but use combine infrastructure for
10944 searching backward. */
c3ffea50 10945static enum rtx_code
9a915772
JH
10946combine_reversed_comparison_code (exp)
10947 rtx exp;
230d793d 10948{
9a915772
JH
10949 enum rtx_code code1 = reversed_comparison_code (exp, NULL);
10950 rtx x;
10951
10952 if (code1 != UNKNOWN
10953 || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
10954 return code1;
10955 /* Otherwise try and find where the condition codes were last set and
10956 use that. */
c3ffea50
AJ
10957 x = get_last_value (XEXP (exp, 0));
10958 if (!x || GET_CODE (x) != COMPARE)
9a915772
JH
10959 return UNKNOWN;
10960 return reversed_comparison_code_parts (GET_CODE (exp),
10961 XEXP (x, 0), XEXP (x, 1), NULL);
10962}
10963/* Return comparison with reversed code of EXP and operands OP0 and OP1.
10964 Return NULL_RTX in case we fail to do the reversal. */
10965static rtx
10966reversed_comparison (exp, mode, op0, op1)
10967 rtx exp, op0, op1;
10968 enum machine_mode mode;
10969{
10970 enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
10971 if (reversed_code == UNKNOWN)
10972 return NULL_RTX;
10973 else
10974 return gen_binary (reversed_code, mode, op0, op1);
230d793d
RS
10975}
10976\f
10977/* Utility function for following routine. Called when X is part of a value
10978 being stored into reg_last_set_value. Sets reg_last_set_table_tick
10979 for each register mentioned. Similar to mention_regs in cse.c */
10980
10981static void
10982update_table_tick (x)
10983 rtx x;
10984{
10985 register enum rtx_code code = GET_CODE (x);
6f7d635c 10986 register const char *fmt = GET_RTX_FORMAT (code);
230d793d
RS
10987 register int i;
10988
10989 if (code == REG)
10990 {
770ae6cc
RK
10991 unsigned int regno = REGNO (x);
10992 unsigned int endregno
10993 = regno + (regno < FIRST_PSEUDO_REGISTER
10994 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10995 unsigned int r;
230d793d 10996
770ae6cc
RK
10997 for (r = regno; r < endregno; r++)
10998 reg_last_set_table_tick[r] = label_tick;
230d793d
RS
10999
11000 return;
11001 }
663522cb 11002
230d793d
RS
11003 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11004 /* Note that we can't have an "E" in values stored; see
11005 get_last_value_validate. */
11006 if (fmt[i] == 'e')
11007 update_table_tick (XEXP (x, i));
11008}
11009
11010/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
11011 are saying that the register is clobbered and we no longer know its
7988fd36
RK
11012 value. If INSN is zero, don't update reg_last_set; this is only permitted
11013 with VALUE also zero and is used to invalidate the register. */
230d793d
RS
11014
11015static void
11016record_value_for_reg (reg, insn, value)
11017 rtx reg;
11018 rtx insn;
11019 rtx value;
11020{
770ae6cc
RK
11021 unsigned int regno = REGNO (reg);
11022 unsigned int endregno
11023 = regno + (regno < FIRST_PSEUDO_REGISTER
11024 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11025 unsigned int i;
230d793d
RS
11026
11027 /* If VALUE contains REG and we have a previous value for REG, substitute
11028 the previous value. */
11029 if (value && insn && reg_overlap_mentioned_p (reg, value))
11030 {
11031 rtx tem;
11032
11033 /* Set things up so get_last_value is allowed to see anything set up to
11034 our insn. */
11035 subst_low_cuid = INSN_CUID (insn);
663522cb 11036 tem = get_last_value (reg);
230d793d 11037
14a774a9
RK
11038 /* If TEM is simply a binary operation with two CLOBBERs as operands,
11039 it isn't going to be useful and will take a lot of time to process,
11040 so just use the CLOBBER. */
11041
230d793d 11042 if (tem)
14a774a9
RK
11043 {
11044 if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11045 || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11046 && GET_CODE (XEXP (tem, 0)) == CLOBBER
11047 && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11048 tem = XEXP (tem, 0);
11049
11050 value = replace_rtx (copy_rtx (value), reg, tem);
11051 }
230d793d
RS
11052 }
11053
11054 /* For each register modified, show we don't know its value, that
ef026f91
RS
11055 we don't know about its bitwise content, that its value has been
11056 updated, and that we don't know the location of the death of the
11057 register. */
770ae6cc 11058 for (i = regno; i < endregno; i++)
230d793d
RS
11059 {
11060 if (insn)
11061 reg_last_set[i] = insn;
770ae6cc 11062
230d793d 11063 reg_last_set_value[i] = 0;
ef026f91
RS
11064 reg_last_set_mode[i] = 0;
11065 reg_last_set_nonzero_bits[i] = 0;
11066 reg_last_set_sign_bit_copies[i] = 0;
230d793d
RS
11067 reg_last_death[i] = 0;
11068 }
11069
11070 /* Mark registers that are being referenced in this value. */
11071 if (value)
11072 update_table_tick (value);
11073
11074 /* Now update the status of each register being set.
11075 If someone is using this register in this block, set this register
11076 to invalid since we will get confused between the two lives in this
11077 basic block. This makes using this register always invalid. In cse, we
11078 scan the table to invalidate all entries using this register, but this
11079 is too much work for us. */
11080
11081 for (i = regno; i < endregno; i++)
11082 {
11083 reg_last_set_label[i] = label_tick;
11084 if (value && reg_last_set_table_tick[i] == label_tick)
11085 reg_last_set_invalid[i] = 1;
11086 else
11087 reg_last_set_invalid[i] = 0;
11088 }
11089
11090 /* The value being assigned might refer to X (like in "x++;"). In that
11091 case, we must replace it with (clobber (const_int 0)) to prevent
11092 infinite loops. */
9a893315 11093 if (value && ! get_last_value_validate (&value, insn,
230d793d
RS
11094 reg_last_set_label[regno], 0))
11095 {
11096 value = copy_rtx (value);
9a893315
JW
11097 if (! get_last_value_validate (&value, insn,
11098 reg_last_set_label[regno], 1))
230d793d
RS
11099 value = 0;
11100 }
11101
55310dad
RK
11102 /* For the main register being modified, update the value, the mode, the
11103 nonzero bits, and the number of sign bit copies. */
11104
230d793d
RS
11105 reg_last_set_value[regno] = value;
11106
55310dad
RK
11107 if (value)
11108 {
2afabb48 11109 subst_low_cuid = INSN_CUID (insn);
55310dad
RK
11110 reg_last_set_mode[regno] = GET_MODE (reg);
11111 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
11112 reg_last_set_sign_bit_copies[regno]
11113 = num_sign_bit_copies (value, GET_MODE (reg));
11114 }
230d793d
RS
11115}
11116
230d793d 11117/* Called via note_stores from record_dead_and_set_regs to handle one
84832317
MM
11118 SET or CLOBBER in an insn. DATA is the instruction in which the
11119 set is occurring. */
230d793d
RS
11120
11121static void
84832317 11122record_dead_and_set_regs_1 (dest, setter, data)
230d793d 11123 rtx dest, setter;
84832317 11124 void *data;
230d793d 11125{
84832317
MM
11126 rtx record_dead_insn = (rtx) data;
11127
ca89d290
RK
11128 if (GET_CODE (dest) == SUBREG)
11129 dest = SUBREG_REG (dest);
11130
230d793d
RS
11131 if (GET_CODE (dest) == REG)
11132 {
11133 /* If we are setting the whole register, we know its value. Otherwise
11134 show that we don't know the value. We can handle SUBREG in
11135 some cases. */
11136 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11137 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11138 else if (GET_CODE (setter) == SET
11139 && GET_CODE (SET_DEST (setter)) == SUBREG
11140 && SUBREG_REG (SET_DEST (setter)) == dest
90bf8081 11141 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
230d793d 11142 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
11143 record_value_for_reg (dest, record_dead_insn,
11144 gen_lowpart_for_combine (GET_MODE (dest),
11145 SET_SRC (setter)));
230d793d 11146 else
5f4f0e22 11147 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
11148 }
11149 else if (GET_CODE (dest) == MEM
11150 /* Ignore pushes, they clobber nothing. */
11151 && ! push_operand (dest, GET_MODE (dest)))
11152 mem_last_set = INSN_CUID (record_dead_insn);
11153}
11154
11155/* Update the records of when each REG was most recently set or killed
11156 for the things done by INSN. This is the last thing done in processing
11157 INSN in the combiner loop.
11158
ef026f91
RS
11159 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11160 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11161 and also the similar information mem_last_set (which insn most recently
11162 modified memory) and last_call_cuid (which insn was the most recent
11163 subroutine call). */
230d793d
RS
11164
11165static void
11166record_dead_and_set_regs (insn)
11167 rtx insn;
11168{
11169 register rtx link;
770ae6cc 11170 unsigned int i;
55310dad 11171
230d793d
RS
11172 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11173 {
dbc131f3
RK
11174 if (REG_NOTE_KIND (link) == REG_DEAD
11175 && GET_CODE (XEXP (link, 0)) == REG)
11176 {
770ae6cc
RK
11177 unsigned int regno = REGNO (XEXP (link, 0));
11178 unsigned int endregno
dbc131f3
RK
11179 = regno + (regno < FIRST_PSEUDO_REGISTER
11180 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11181 : 1);
dbc131f3
RK
11182
11183 for (i = regno; i < endregno; i++)
11184 reg_last_death[i] = insn;
11185 }
230d793d 11186 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 11187 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
11188 }
11189
11190 if (GET_CODE (insn) == CALL_INSN)
55310dad
RK
11191 {
11192 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11193 if (call_used_regs[i])
11194 {
11195 reg_last_set_value[i] = 0;
ef026f91
RS
11196 reg_last_set_mode[i] = 0;
11197 reg_last_set_nonzero_bits[i] = 0;
11198 reg_last_set_sign_bit_copies[i] = 0;
55310dad
RK
11199 reg_last_death[i] = 0;
11200 }
11201
11202 last_call_cuid = mem_last_set = INSN_CUID (insn);
11203 }
230d793d 11204
84832317 11205 note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
230d793d 11206}
732f2ac9 11207
732f2ac9
JJ
11208/* If a SUBREG has the promoted bit set, it is in fact a property of the
11209 register present in the SUBREG, so for each such SUBREG go back and
11210 adjust nonzero and sign bit information of the registers that are
11211 known to have some zero/sign bits set.
11212
11213 This is needed because when combine blows the SUBREGs away, the
11214 information on zero/sign bits is lost and further combines can be
11215 missed because of that. */
11216
11217static void
11218record_promoted_value (insn, subreg)
663522cb
KH
11219 rtx insn;
11220 rtx subreg;
732f2ac9 11221{
4a71b24f 11222 rtx links, set;
770ae6cc 11223 unsigned int regno = REGNO (SUBREG_REG (subreg));
732f2ac9
JJ
11224 enum machine_mode mode = GET_MODE (subreg);
11225
25af74a0 11226 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
732f2ac9
JJ
11227 return;
11228
663522cb 11229 for (links = LOG_LINKS (insn); links;)
732f2ac9
JJ
11230 {
11231 insn = XEXP (links, 0);
11232 set = single_set (insn);
11233
11234 if (! set || GET_CODE (SET_DEST (set)) != REG
11235 || REGNO (SET_DEST (set)) != regno
11236 || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11237 {
11238 links = XEXP (links, 1);
11239 continue;
11240 }
11241
663522cb
KH
11242 if (reg_last_set[regno] == insn)
11243 {
732f2ac9 11244 if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
663522cb
KH
11245 reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11246 }
732f2ac9
JJ
11247
11248 if (GET_CODE (SET_SRC (set)) == REG)
11249 {
11250 regno = REGNO (SET_SRC (set));
11251 links = LOG_LINKS (insn);
11252 }
11253 else
11254 break;
11255 }
11256}
11257
11258/* Scan X for promoted SUBREGs. For each one found,
11259 note what it implies to the registers used in it. */
11260
11261static void
11262check_promoted_subreg (insn, x)
663522cb
KH
11263 rtx insn;
11264 rtx x;
732f2ac9
JJ
11265{
11266 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11267 && GET_CODE (SUBREG_REG (x)) == REG)
11268 record_promoted_value (insn, x);
11269 else
11270 {
11271 const char *format = GET_RTX_FORMAT (GET_CODE (x));
11272 int i, j;
11273
11274 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
663522cb 11275 switch (format[i])
732f2ac9
JJ
11276 {
11277 case 'e':
11278 check_promoted_subreg (insn, XEXP (x, i));
11279 break;
11280 case 'V':
11281 case 'E':
11282 if (XVEC (x, i) != 0)
11283 for (j = 0; j < XVECLEN (x, i); j++)
11284 check_promoted_subreg (insn, XVECEXP (x, i, j));
11285 break;
11286 }
11287 }
11288}
230d793d
RS
11289\f
11290/* Utility routine for the following function. Verify that all the registers
11291 mentioned in *LOC are valid when *LOC was part of a value set when
11292 label_tick == TICK. Return 0 if some are not.
11293
11294 If REPLACE is non-zero, replace the invalid reference with
11295 (clobber (const_int 0)) and return 1. This replacement is useful because
11296 we often can get useful information about the form of a value (e.g., if
11297 it was produced by a shift that always produces -1 or 0) even though
11298 we don't know exactly what registers it was produced from. */
11299
11300static int
9a893315 11301get_last_value_validate (loc, insn, tick, replace)
230d793d 11302 rtx *loc;
9a893315 11303 rtx insn;
230d793d
RS
11304 int tick;
11305 int replace;
11306{
11307 rtx x = *loc;
6f7d635c 11308 const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
230d793d
RS
11309 int len = GET_RTX_LENGTH (GET_CODE (x));
11310 int i;
11311
11312 if (GET_CODE (x) == REG)
11313 {
770ae6cc
RK
11314 unsigned int regno = REGNO (x);
11315 unsigned int endregno
11316 = regno + (regno < FIRST_PSEUDO_REGISTER
11317 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11318 unsigned int j;
230d793d
RS
11319
11320 for (j = regno; j < endregno; j++)
11321 if (reg_last_set_invalid[j]
57cf50a4
GRK
11322 /* If this is a pseudo-register that was only set once and not
11323 live at the beginning of the function, it is always valid. */
663522cb 11324 || (! (regno >= FIRST_PSEUDO_REGISTER
57cf50a4 11325 && REG_N_SETS (regno) == 1
770ae6cc
RK
11326 && (! REGNO_REG_SET_P
11327 (BASIC_BLOCK (0)->global_live_at_start, regno)))
230d793d
RS
11328 && reg_last_set_label[j] > tick))
11329 {
11330 if (replace)
38a448ca 11331 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
11332 return replace;
11333 }
11334
11335 return 1;
11336 }
9a893315
JW
11337 /* If this is a memory reference, make sure that there were
11338 no stores after it that might have clobbered the value. We don't
11339 have alias info, so we assume any store invalidates it. */
11340 else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11341 && INSN_CUID (insn) <= mem_last_set)
11342 {
11343 if (replace)
38a448ca 11344 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9a893315
JW
11345 return replace;
11346 }
230d793d
RS
11347
11348 for (i = 0; i < len; i++)
11349 if ((fmt[i] == 'e'
9a893315 11350 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
230d793d
RS
11351 /* Don't bother with these. They shouldn't occur anyway. */
11352 || fmt[i] == 'E')
11353 return 0;
11354
11355 /* If we haven't found a reason for it to be invalid, it is valid. */
11356 return 1;
11357}
11358
11359/* Get the last value assigned to X, if known. Some registers
11360 in the value may be replaced with (clobber (const_int 0)) if their value
11361 is known longer known reliably. */
11362
11363static rtx
11364get_last_value (x)
11365 rtx x;
11366{
770ae6cc 11367 unsigned int regno;
230d793d
RS
11368 rtx value;
11369
11370 /* If this is a non-paradoxical SUBREG, get the value of its operand and
11371 then convert it to the desired mode. If this is a paradoxical SUBREG,
0f41302f 11372 we cannot predict what values the "extra" bits might have. */
230d793d
RS
11373 if (GET_CODE (x) == SUBREG
11374 && subreg_lowpart_p (x)
11375 && (GET_MODE_SIZE (GET_MODE (x))
11376 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11377 && (value = get_last_value (SUBREG_REG (x))) != 0)
11378 return gen_lowpart_for_combine (GET_MODE (x), value);
11379
11380 if (GET_CODE (x) != REG)
11381 return 0;
11382
11383 regno = REGNO (x);
11384 value = reg_last_set_value[regno];
11385
57cf50a4
GRK
11386 /* If we don't have a value, or if it isn't for this basic block and
11387 it's either a hard register, set more than once, or it's a live
663522cb 11388 at the beginning of the function, return 0.
57cf50a4 11389
663522cb 11390 Because if it's not live at the beginnning of the function then the reg
57cf50a4
GRK
11391 is always set before being used (is never used without being set).
11392 And, if it's set only once, and it's always set before use, then all
11393 uses must have the same last value, even if it's not from this basic
11394 block. */
230d793d
RS
11395
11396 if (value == 0
57cf50a4
GRK
11397 || (reg_last_set_label[regno] != label_tick
11398 && (regno < FIRST_PSEUDO_REGISTER
11399 || REG_N_SETS (regno) != 1
770ae6cc
RK
11400 || (REGNO_REG_SET_P
11401 (BASIC_BLOCK (0)->global_live_at_start, regno)))))
230d793d
RS
11402 return 0;
11403
4255220d 11404 /* If the value was set in a later insn than the ones we are processing,
ca4cd906 11405 we can't use it even if the register was only set once. */
bcd49eb7 11406 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
ca4cd906 11407 return 0;
d0ab8cd3
RK
11408
11409 /* If the value has all its registers valid, return it. */
9a893315
JW
11410 if (get_last_value_validate (&value, reg_last_set[regno],
11411 reg_last_set_label[regno], 0))
230d793d
RS
11412 return value;
11413
11414 /* Otherwise, make a copy and replace any invalid register with
11415 (clobber (const_int 0)). If that fails for some reason, return 0. */
11416
11417 value = copy_rtx (value);
9a893315
JW
11418 if (get_last_value_validate (&value, reg_last_set[regno],
11419 reg_last_set_label[regno], 1))
230d793d
RS
11420 return value;
11421
11422 return 0;
11423}
11424\f
11425/* Return nonzero if expression X refers to a REG or to memory
11426 that is set in an instruction more recent than FROM_CUID. */
11427
11428static int
11429use_crosses_set_p (x, from_cuid)
11430 register rtx x;
11431 int from_cuid;
11432{
6f7d635c 11433 register const char *fmt;
230d793d
RS
11434 register int i;
11435 register enum rtx_code code = GET_CODE (x);
11436
11437 if (code == REG)
11438 {
770ae6cc
RK
11439 unsigned int regno = REGNO (x);
11440 unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
663522cb
KH
11441 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11442
230d793d
RS
11443#ifdef PUSH_ROUNDING
11444 /* Don't allow uses of the stack pointer to be moved,
11445 because we don't know whether the move crosses a push insn. */
f73ad30e 11446 if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
230d793d
RS
11447 return 1;
11448#endif
770ae6cc 11449 for (; regno < endreg; regno++)
e28f5732
RK
11450 if (reg_last_set[regno]
11451 && INSN_CUID (reg_last_set[regno]) > from_cuid)
11452 return 1;
11453 return 0;
230d793d
RS
11454 }
11455
11456 if (code == MEM && mem_last_set > from_cuid)
11457 return 1;
11458
11459 fmt = GET_RTX_FORMAT (code);
11460
11461 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11462 {
11463 if (fmt[i] == 'E')
11464 {
11465 register int j;
11466 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11467 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11468 return 1;
11469 }
11470 else if (fmt[i] == 'e'
11471 && use_crosses_set_p (XEXP (x, i), from_cuid))
11472 return 1;
11473 }
11474 return 0;
11475}
11476\f
11477/* Define three variables used for communication between the following
11478 routines. */
11479
770ae6cc 11480static unsigned int reg_dead_regno, reg_dead_endregno;
230d793d
RS
11481static int reg_dead_flag;
11482
11483/* Function called via note_stores from reg_dead_at_p.
11484
663522cb 11485 If DEST is within [reg_dead_regno, reg_dead_endregno), set
230d793d
RS
11486 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
11487
11488static void
84832317 11489reg_dead_at_p_1 (dest, x, data)
230d793d
RS
11490 rtx dest;
11491 rtx x;
84832317 11492 void *data ATTRIBUTE_UNUSED;
230d793d 11493{
770ae6cc 11494 unsigned int regno, endregno;
230d793d
RS
11495
11496 if (GET_CODE (dest) != REG)
11497 return;
11498
11499 regno = REGNO (dest);
663522cb 11500 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
230d793d
RS
11501 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
11502
11503 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11504 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11505}
11506
11507/* Return non-zero if REG is known to be dead at INSN.
11508
11509 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
11510 referencing REG, it is dead. If we hit a SET referencing REG, it is
11511 live. Otherwise, see if it is live or dead at the start of the basic
6e25d159
RK
11512 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
11513 must be assumed to be always live. */
230d793d
RS
11514
11515static int
11516reg_dead_at_p (reg, insn)
11517 rtx reg;
11518 rtx insn;
11519{
770ae6cc
RK
11520 int block;
11521 unsigned int i;
230d793d
RS
11522
11523 /* Set variables for reg_dead_at_p_1. */
11524 reg_dead_regno = REGNO (reg);
11525 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11526 ? HARD_REGNO_NREGS (reg_dead_regno,
11527 GET_MODE (reg))
11528 : 1);
11529
11530 reg_dead_flag = 0;
11531
6e25d159
RK
11532 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
11533 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11534 {
11535 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11536 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
11537 return 0;
11538 }
11539
230d793d
RS
11540 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11541 beginning of function. */
60715d0b 11542 for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
230d793d
RS
11543 insn = prev_nonnote_insn (insn))
11544 {
84832317 11545 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
230d793d
RS
11546 if (reg_dead_flag)
11547 return reg_dead_flag == 1 ? 1 : 0;
11548
11549 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11550 return 1;
11551 }
11552
11553 /* Get the basic block number that we were in. */
11554 if (insn == 0)
11555 block = 0;
11556 else
11557 {
11558 for (block = 0; block < n_basic_blocks; block++)
3b413743 11559 if (insn == BLOCK_HEAD (block))
230d793d
RS
11560 break;
11561
11562 if (block == n_basic_blocks)
11563 return 0;
11564 }
11565
11566 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
e881bb1b 11567 if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
230d793d
RS
11568 return 0;
11569
11570 return 1;
11571}
6e25d159
RK
11572\f
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.

   Every hard register found (expanded to all the registers a wide-mode
   hard reg occupies) is recorded in the file-scope HARD_REG_SET
   newpat_used_regs; the stack, frame and arg pointers are deliberately
   excluded.  */

static void
mark_used_regs_combine (x)
     rtx x;
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    /* Leaf rtxes that cannot contain a hard register reference.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int endregno, r;

	  /* None of this applies to the stack, frame or arg pointers */
	  if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  for (r = regno; r < endregno; r++)
	    SET_HARD_REG_BIT (newpat_used_regs, r);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  */
	register rtx testreg = SET_DEST (x);

	/* Strip wrappers so we can see whether the real destination
	   is a MEM.  */
	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (GET_CODE (testreg) == MEM)
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    register int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}
230d793d
RS
11676\f
11677/* Remove register number REGNO from the dead registers list of INSN.
11678
11679 Return the note used to record the death, if there was one. */
11680
11681rtx
11682remove_death (regno, insn)
770ae6cc 11683 unsigned int regno;
230d793d
RS
11684 rtx insn;
11685{
11686 register rtx note = find_regno_note (insn, REG_DEAD, regno);
11687
11688 if (note)
1a26b032 11689 {
b1f21e0a 11690 REG_N_DEATHS (regno)--;
1a26b032
RK
11691 remove_note (insn, note);
11692 }
230d793d
RS
11693
11694 return note;
11695}
11696
/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
     rtx x;
     rtx maybe_kill_insn;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register const char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && ! reg_referenced_p (x, maybe_kill_insn))
	return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);

      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      /* Move the death note only if the death falls inside the
	 [FROM_CUID, TO_INSN) window.  */
      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  > GET_MODE_SIZE (GET_MODE (x))))
	    {
	      unsigned int deadregno = REGNO (XEXP (note, 0));
	      unsigned int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      unsigned int ourend
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      unsigned int i;

	      /* Re-create death notes for the pieces of the wide note
		 that X does not cover.  */
	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx_EXPR_LIST (REG_DEAD,
					 gen_rtx_REG (reg_raw_mode[i], i),
					 REG_NOTES (where_dead));
	    }

	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
			    < GET_MODE_SIZE (GET_MODE (x)))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      unsigned int ourend
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      unsigned int i, offset;
	      rtx oldnotes = 0;

	      if (note)
		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
	      else
		offset = 1;

	      for (i = regno + offset; i < ourend; i++)
		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

	  REG_N_DEATHS (regno)++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some
	 piece of every register in the expression is used by
	 this insn, so remove any old death.  */
      /* ??? So why do we test for equality of the sizes?  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  /* Default: recursively scan the operands of X.  */
  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
11876\f
a7c99304
RK
11877/* Return 1 if X is the target of a bit-field assignment in BODY, the
11878 pattern of an insn. X must be a REG. */
230d793d
RS
11879
11880static int
a7c99304
RK
11881reg_bitfield_target_p (x, body)
11882 rtx x;
230d793d
RS
11883 rtx body;
11884{
11885 int i;
11886
11887 if (GET_CODE (body) == SET)
a7c99304
RK
11888 {
11889 rtx dest = SET_DEST (body);
11890 rtx target;
770ae6cc 11891 unsigned int regno, tregno, endregno, endtregno;
a7c99304
RK
11892
11893 if (GET_CODE (dest) == ZERO_EXTRACT)
11894 target = XEXP (dest, 0);
11895 else if (GET_CODE (dest) == STRICT_LOW_PART)
11896 target = SUBREG_REG (XEXP (dest, 0));
11897 else
11898 return 0;
11899
11900 if (GET_CODE (target) == SUBREG)
11901 target = SUBREG_REG (target);
11902
11903 if (GET_CODE (target) != REG)
11904 return 0;
11905
11906 tregno = REGNO (target), regno = REGNO (x);
11907 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11908 return target == x;
11909
11910 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
11911 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11912
11913 return endregno > tregno && regno < endtregno;
11914 }
230d793d
RS
11915
11916 else if (GET_CODE (body) == PARALLEL)
11917 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 11918 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
11919 return 1;
11920
11921 return 0;
663522cb 11922}
230d793d
RS
11923\f
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      /* PLACE is where the note should go; PLACE2 is a second insn that
	 also needs a copy (used by REG_INC, REG_NO_CONFLICT, REG_LABEL).  */
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_BR_PROB:
	case REG_BR_PRED:
	case REG_EXEC_COUNT:
	  /* Doesn't matter much where we put this, as long as it's somewhere.
	     It is preferable to keep these notes on branches, which is most
	     likely to be i3.  */
	  place = i3;
	  break;

	case REG_NON_LOCAL_GOTO:
	  if (GET_CODE (i3) == JUMP_INSN)
	    place = i3;
	  else if (i2 && GET_CODE (i2) == JUMP_INSN)
	    place = i2;
	  else
	    abort();
	  break;

	case REG_EH_REGION:
	  /* These notes must remain with the call or trapping instruction.  */
	  if (GET_CODE (i3) == CALL_INSN)
	    place = i3;
	  else if (i2 && GET_CODE (i2) == CALL_INSN)
	    place = i2;
	  else if (flag_non_call_exceptions)
	    {
	      if (may_trap_p (i3))
		place = i3;
	      else if (i2 && may_trap_p (i2))
		place = i2;
	      /* ??? Otherwise assume we've combined things such that we
		 can now prove that the instructions can't trap.  Drop the
		 note in this case.  */
	    }
	  else
	    abort ();
	  break;

	case REG_EH_RETHROW:
	case REG_NORETURN:
	  /* These notes must remain with the call.  It should not be
	     possible for both I2 and I3 to be a call.  */
	  if (GET_CODE (i3) == CALL_INSN)
	    place = i3;
	  else if (i2 && GET_CODE (i2) == CALL_INSN)
	    place = i2;
	  else
	    abort ();
	  break;

	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if it is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes from
	     i2 or i1 for register which were both used and clobbered, so
	     we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */

	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (GET_CODE (XEXP (note, 0)) == REG
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (GET_CODE (XEXP (note, 0)) == REG
			 ? find_regno_note (i3, REG_DEAD,
					    REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NOALIAS:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3 in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_LABEL:
	  /* This can show up in several ways -- either directly in the
	     pattern, or hidden off in the constant pool with (or without?)
	     a REG_EQUAL note.  */
	  /* ??? Ignore the without-reg_equal-note problem for now.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
	    place = i3;

	  if (i2
	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_NONNEG:
	case REG_WAS_0:
	  /* These notes say something about the value of a register prior
	     to the execution of an insn.  It is too much trouble to see
	     if the note is still correct in all situations.  It is better
	     to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

	  if (from_insn
	      && GET_CODE (from_insn) == CALL_INSN
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (rtx_equal_p (XEXP (note, 0), elim_i2)
	      || rtx_equal_p (XEXP (note, 0), elim_i1))
	    break;

	  /* No obvious place found: scan backwards from I3 toward the
	     start of the basic block for a use or set of the register.  */
	  if (place == 0)
	    {
	      basic_block bb = BASIC_BLOCK (this_basic_block);

	      for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
		{
		  if (! INSN_P (tem))
		    {
		      if (tem == bb->head)
			break;
		      continue;
		    }

		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  */
		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);
		      rtx inner_dest = 0;
#ifdef HAVE_cc0
		      rtx cc0_setter = NULL_RTX;
#endif

		      if (set != 0)
			for (inner_dest = SET_DEST (set);
			     (GET_CODE (inner_dest) == STRICT_LOW_PART
			      || GET_CODE (inner_dest) == SUBREG
			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
			     inner_dest = XEXP (inner_dest, 0))
			  ;

		      /* Verify that it was the set, and not a clobber that
			 modified the register.

			 CC0 targets must be careful to maintain setter/user
			 pairs.  If we cannot delete the setter due to side
			 effects, mark the user with an UNUSED note instead
			 of deleting it.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
			  )
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */

			  PATTERN (tem) = pc_rtx;

			  distribute_notes (REG_NOTES (tem), tem, tem,
					    NULL_RTX, NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  PUT_CODE (tem, NOTE);
			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (tem) = 0;

#ifdef HAVE_cc0
			  /* Delete the setter too.  */
			  if (cc0_setter)
			    {
			      PATTERN (cc0_setter) = pc_rtx;

			      distribute_notes (REG_NOTES (cc0_setter),
						cc0_setter, cc0_setter,
						NULL_RTX, NULL_RTX, NULL_RTX);
			      distribute_links (LOG_LINKS (cc0_setter));

			      PUT_CODE (cc0_setter, NOTE);
			      NOTE_LINE_NUMBER (cc0_setter)
				= NOTE_INSN_DELETED;
			      NOTE_SOURCE_FILE (cc0_setter) = 0;
			    }
#endif
			}
		      /* If the register is both set and used here, put the
			 REG_DEAD note here, but place a REG_UNUSED note
			 here too unless there already is one.  */
		      else if (reg_referenced_p (XEXP (note, 0),
						 PATTERN (tem)))
			{
			  place = tem;

			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    REG_NOTES (tem)
			      = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
						   REG_NOTES (tem));
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put one
			     here.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (GET_CODE (tem) == CALL_INSN
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place to
			 i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn
			  && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }

		  if (tem == bb->head)
		    break;
		}

	      /* We haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit the beginning
		 of the block.  If the existing life info says the reg
		 was dead, there's nothing left to do.  Otherwise, we'll
		 need to do a global life update after combine.  */
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
		  && REGNO_REG_SET_P (bb->global_live_at_start,
				      REGNO (XEXP (note, 0))))
		{
		  SET_BIT (refresh_blocks, this_basic_block);
		  need_refresh = 1;
		}
	    }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.
	     Note that we get here if the register is set at all, not only
	     if it is totally replaced, which is what `dead_or_set_p'
	     checks, so also check for it being set partially.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      unsigned int regno = REGNO (XEXP (note, 0));

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.] */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must arrange for an appropriate REG_DEAD
		 note to be added for it.  However, we can't just emit a USE
		 and tag the note to it, since the register might actually
		 be dead; so we recurse, and the recursive call then finds
		 the previous insn that used this register.  */

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  unsigned int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  unsigned int i;

		  for (i = regno; i < endregno; i++)
		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			 && ! find_regno_fusage (place, USE, i))
			|| dead_or_set_regno_p (place, i))
		      all_used = 0;

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 not already dead or set.  */

		      for (i = regno; i < endregno;
			   i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
			{
			  rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
			  basic_block bb = BASIC_BLOCK (this_basic_block);

			  if (! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    {
			      rtx new_note
				= gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);

			      distribute_notes (new_note, place, place,
						NULL_RTX, NULL_RTX, NULL_RTX);
			    }
			  else if (! refers_to_regno_p (i, i + 1,
							PATTERN (place), 0)
				   && ! find_regno_fusage (place, USE, i))
			    /* The piece is unused here; scan backwards for
			       the insn where it died or was set and mark it
			       REG_UNUSED there.  */
			    for (tem = PREV_INSN (place); ;
				 tem = PREV_INSN (tem))
			      {
				if (! INSN_P (tem))
				  {
				    if (tem == bb->head)
				      {
					SET_BIT (refresh_blocks,
						 this_basic_block);
					need_refresh = 1;
					break;
				      }
				    continue;
				  }
				if (dead_or_set_p (tem, piece)
				    || reg_bitfield_target_p (piece,
							      PATTERN (tem)))
				  {
				    REG_NOTES (tem)
				      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
							   REG_NOTES (tem));
				    break;
				  }
			      }

			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      /* Attach the note at PLACE, or adjust the death count if the note
	 is being dropped.  */
      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      /* PLACE2 gets a fresh copy of the note, since the original was
	 consumed by PLACE above.  */
      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
					       REG_NOTE_KIND (note),
					       XEXP (note, 0),
					       REG_NOTES (place2));
	}
    }
}
12467\f
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link to
	 have been changed by combine.  The only potential of this is if we
	 replace I3, I2, and I1 by I3 and I2.  But in that case the
	 destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;

      /* Strip wrappers to find the register actually set.  */
      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && (this_basic_block == n_basic_blocks - 1
		     || BLOCK_HEAD (this_basic_block + 1) != insn));
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }
	else if (GET_CODE (insn) == CALL_INSN
		 && find_reg_fusage (insn, USE, reg))
	  {
	    place = insn;
	    break;
	  }

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;

	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
	      if (added_links_insn == 0
		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
		added_links_insn = place;
	    }
	}
    }
}
12558\f
1427d6d2
RK
12559/* Compute INSN_CUID for INSN, which is an insn made by combine. */
12560
12561static int
12562insn_cuid (insn)
12563 rtx insn;
12564{
12565 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
12566 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
12567 insn = NEXT_INSN (insn);
12568
12569 if (INSN_UID (insn) > max_uid_cuid)
12570 abort ();
12571
12572 return INSN_CUID (insn);
12573}
12574\f
230d793d
RS
12575void
12576dump_combine_stats (file)
12577 FILE *file;
12578{
ab87f8c8 12579 fnotice
230d793d
RS
12580 (file,
12581 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
12582 combine_attempts, combine_merges, combine_extras, combine_successes);
12583}
12584
12585void
12586dump_combine_total_stats (file)
12587 FILE *file;
12588{
ab87f8c8 12589 fnotice
230d793d
RS
12590 (file,
12591 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
12592 total_attempts, total_merges, total_extras, total_successes);
12593}
This page took 3.280477 seconds and 5 git commands to generate.