]> gcc.gnu.org Git - gcc.git/blame - gcc/combine.c
* regclass.c (N_REG_INTS): Use only 32 bits per element.
[gcc.git] / gcc / combine.c
CommitLineData
230d793d 1/* Optimize by combining instructions for GNU compiler.
3c71940f 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
8f8d8d6e 3 1999, 2000, 2001 Free Software Foundation, Inc.
230d793d
RS
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA. */
230d793d 21
230d793d
RS
22/* This module is essentially the "combiner" phase of the U. of Arizona
23 Portable Optimizer, but redone to work on our list-structured
24 representation for RTL instead of their string representation.
25
26 The LOG_LINKS of each insn identify the most recent assignment
27 to each REG used in the insn. It is a list of previous insns,
28 each of which contains a SET for a REG that is used in this insn
29 and not used or set in between. LOG_LINKs never cross basic blocks.
30 They were set up by the preceding pass (lifetime analysis).
31
32 We try to combine each pair of insns joined by a logical link.
33 We also try to combine triples of insns A, B and C when
34 C has a link back to B and B has a link back to A.
35
36 LOG_LINKS does not have links for use of the CC0. They don't
37 need to, because the insn that sets the CC0 is always immediately
38 before the insn that tests it. So we always regard a branch
39 insn as having a logical link to the preceding insn. The same is true
40 for an insn explicitly using CC0.
41
42 We check (with use_crosses_set_p) to avoid combining in such a way
43 as to move a computation to a place where its value would be different.
44
45 Combination is done by mathematically substituting the previous
46 insn(s) values for the regs they set into the expressions in
47 the later insns that refer to these regs. If the result is a valid insn
48 for our target machine, according to the machine description,
49 we install it, delete the earlier insns, and update the data flow
50 information (LOG_LINKS and REG_NOTES) for what we did.
51
52 There are a few exceptions where the dataflow information created by
53 flow.c aren't completely updated:
54
55 - reg_live_length is not updated
56 - reg_n_refs is not adjusted in the rare case when a register is
57 no longer required in a computation
58 - there are extremely rare cases (see distribute_regnotes) when a
59 REG_DEAD note is lost
60 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
663522cb 61 removed because there is no way to know which register it was
230d793d
RS
62 linking
63
64 To simplify substitution, we combine only when the earlier insn(s)
65 consist of only a single assignment. To simplify updating afterward,
66 we never combine when a subroutine call appears in the middle.
67
68 Since we do not represent assignments to CC0 explicitly except when that
69 is all an insn does, there is no LOG_LINKS entry in an insn that uses
70 the condition code for the insn that set the condition code.
71 Fortunately, these two insns must be consecutive.
72 Therefore, every JUMP_INSN is taken to have an implicit logical link
73 to the preceding insn. This is not quite right, since non-jumps can
74 also use the condition code; but in practice such insns would not
75 combine anyway. */
76
230d793d 77#include "config.h"
670ee920 78#include "system.h"
c5c76735 79#include "rtl.h"
a091679a 80#include "tm_p.h"
230d793d
RS
81#include "flags.h"
82#include "regs.h"
55310dad 83#include "hard-reg-set.h"
230d793d
RS
84#include "basic-block.h"
85#include "insn-config.h"
49ad7cfa 86#include "function.h"
d6f4ec51
KG
87/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
88#include "expr.h"
230d793d
RS
89#include "insn-attr.h"
90#include "recog.h"
91#include "real.h"
2e107e9e 92#include "toplev.h"
f73ad30e 93
230d793d
RS
94/* It is not safe to use ordinary gen_lowpart in combine.
95 Use gen_lowpart_for_combine instead. See comments there. */
96#define gen_lowpart dont_use_gen_lowpart_you_dummy
97
98/* Number of attempts to combine instructions in this function. */
99
100static int combine_attempts;
101
102/* Number of attempts that got as far as substitution in this function. */
103
104static int combine_merges;
105
106/* Number of instructions combined with added SETs in this function. */
107
108static int combine_extras;
109
110/* Number of instructions combined in this function. */
111
112static int combine_successes;
113
114/* Totals over entire compilation. */
115
116static int total_attempts, total_merges, total_extras, total_successes;
9210df58 117
230d793d
RS
118\f
119/* Vector mapping INSN_UIDs to cuids.
5089e22e 120 The cuids are like uids but increase monotonically always.
230d793d
RS
121 Combine always uses cuids so that it can compare them.
122 But actually renumbering the uids, which we used to do,
123 proves to be a bad idea because it makes it hard to compare
124 the dumps produced by earlier passes with those from later passes. */
125
126static int *uid_cuid;
4255220d 127static int max_uid_cuid;
230d793d
RS
128
129/* Get the cuid of an insn. */
130
1427d6d2
RK
131#define INSN_CUID(INSN) \
132(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
230d793d 133
42a6ff51
AO
134/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
135 BITS_PER_WORD would invoke undefined behavior. Work around it. */
136
137#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
138 (((unsigned HOST_WIDE_INT)(val) << (BITS_PER_WORD - 1)) << 1)
139
230d793d
RS
140/* Maximum register number, which is the size of the tables below. */
141
770ae6cc 142static unsigned int combine_max_regno;
230d793d
RS
143
144/* Record last point of death of (hard or pseudo) register n. */
145
146static rtx *reg_last_death;
147
148/* Record last point of modification of (hard or pseudo) register n. */
149
150static rtx *reg_last_set;
151
152/* Record the cuid of the last insn that invalidated memory
153 (anything that writes memory, and subroutine calls, but not pushes). */
154
155static int mem_last_set;
156
157/* Record the cuid of the last CALL_INSN
158 so we can tell whether a potential combination crosses any calls. */
159
160static int last_call_cuid;
161
162/* When `subst' is called, this is the insn that is being modified
163 (by combining in a previous insn). The PATTERN of this insn
164 is still the old pattern partially modified and it should not be
165 looked at, but this may be used to examine the successors of the insn
166 to judge whether a simplification is valid. */
167
168static rtx subst_insn;
169
0d9641d1
JW
170/* This is an insn that belongs before subst_insn, but is not currently
171 on the insn chain. */
172
173static rtx subst_prev_insn;
174
230d793d
RS
175/* This is the lowest CUID that `subst' is currently dealing with.
176 get_last_value will not return a value if the register was set at or
177 after this CUID. If not for this mechanism, we could get confused if
178 I2 or I1 in try_combine were an insn that used the old value of a register
179 to obtain a new value. In that case, we might erroneously get the
180 new value of the register when we wanted the old one. */
181
182static int subst_low_cuid;
183
6e25d159
RK
184/* This contains any hard registers that are used in newpat; reg_dead_at_p
185 must consider all these registers to be always live. */
186
187static HARD_REG_SET newpat_used_regs;
188
abe6e52f
RK
189/* This is an insn to which a LOG_LINKS entry has been added. If this
190 insn is the earlier than I2 or I3, combine should rescan starting at
191 that location. */
192
193static rtx added_links_insn;
194
0d4d42c3
RK
195/* Basic block number of the block in which we are performing combines. */
196static int this_basic_block;
715e7fbc 197
663522cb
KH
198/* A bitmap indicating which blocks had registers go dead at entry.
199 After combine, we'll need to re-do global life analysis with
715e7fbc
RH
200 those blocks as starting points. */
201static sbitmap refresh_blocks;
202static int need_refresh;
230d793d
RS
203\f
204/* The next group of arrays allows the recording of the last value assigned
205 to (hard or pseudo) register n. We use this information to see if a
5089e22e 206 operation being processed is redundant given a prior operation performed
230d793d
RS
207 on the register. For example, an `and' with a constant is redundant if
208 all the zero bits are already known to be turned off.
209
210 We use an approach similar to that used by cse, but change it in the
211 following ways:
212
213 (1) We do not want to reinitialize at each label.
214 (2) It is useful, but not critical, to know the actual value assigned
215 to a register. Often just its form is helpful.
216
217 Therefore, we maintain the following arrays:
218
219 reg_last_set_value the last value assigned
220 reg_last_set_label records the value of label_tick when the
221 register was assigned
222 reg_last_set_table_tick records the value of label_tick when a
223 value using the register is assigned
224 reg_last_set_invalid set to non-zero when it is not valid
225 to use the value of this register in some
226 register's value
227
228 To understand the usage of these tables, it is important to understand
229 the distinction between the value in reg_last_set_value being valid
230 and the register being validly contained in some other expression in the
231 table.
232
233 Entry I in reg_last_set_value is valid if it is non-zero, and either
234 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
235
236 Register I may validly appear in any expression returned for the value
237 of another register if reg_n_sets[i] is 1. It may also appear in the
238 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
239 reg_last_set_invalid[j] is zero.
240
241 If an expression is found in the table containing a register which may
242 not validly appear in an expression, the register is replaced by
243 something that won't match, (clobber (const_int 0)).
244
245 reg_last_set_invalid[i] is set non-zero when register I is being assigned
246 to and reg_last_set_table_tick[i] == label_tick. */
247
0f41302f 248/* Record last value assigned to (hard or pseudo) register n. */
230d793d
RS
249
250static rtx *reg_last_set_value;
251
252/* Record the value of label_tick when the value for register n is placed in
253 reg_last_set_value[n]. */
254
568356af 255static int *reg_last_set_label;
230d793d
RS
256
257/* Record the value of label_tick when an expression involving register n
0f41302f 258 is placed in reg_last_set_value. */
230d793d 259
568356af 260static int *reg_last_set_table_tick;
230d793d
RS
261
262/* Set non-zero if references to register n in expressions should not be
263 used. */
264
265static char *reg_last_set_invalid;
266
0f41302f 267/* Incremented for each label. */
230d793d 268
568356af 269static int label_tick;
230d793d
RS
270
271/* Some registers that are set more than once and used in more than one
272 basic block are nevertheless always set in similar ways. For example,
273 a QImode register may be loaded from memory in two places on a machine
274 where byte loads zero extend.
275
951553af 276 We record in the following array what we know about the nonzero
230d793d
RS
277 bits of a register, specifically which bits are known to be zero.
278
279 If an entry is zero, it means that we don't know anything special. */
280
55310dad 281static unsigned HOST_WIDE_INT *reg_nonzero_bits;
230d793d 282
951553af 283/* Mode used to compute significance in reg_nonzero_bits. It is the largest
5f4f0e22 284 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
230d793d 285
951553af 286static enum machine_mode nonzero_bits_mode;
230d793d 287
d0ab8cd3
RK
288/* Nonzero if we know that a register has some leading bits that are always
289 equal to the sign bit. */
290
770ae6cc 291static unsigned char *reg_sign_bit_copies;
d0ab8cd3 292
951553af 293/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
1a26b032
RK
294 It is zero while computing them and after combine has completed. This
295 former test prevents propagating values based on previously set values,
296 which can be incorrect if a variable is modified in a loop. */
230d793d 297
951553af 298static int nonzero_sign_valid;
55310dad
RK
299
300/* These arrays are maintained in parallel with reg_last_set_value
301 and are used to store the mode in which the register was last set,
302 the bits that were known to be zero when it was last set, and the
303 number of sign bits copies it was known to have when it was last set. */
304
305static enum machine_mode *reg_last_set_mode;
306static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
307static char *reg_last_set_sign_bit_copies;
230d793d
RS
308\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;			/* Next record on the chain.  */
  int is_int;				/* 1 if the `i' union members apply.  */
  union {rtx r; unsigned int i;} old_contents;	/* Value to restore.  */
  union {rtx *r; unsigned int *i;} where;	/* Location to restore into.  */
};

/* Record a bunch of changes to be undone, all of which can be rolled
   back at once by undo_all.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;		/* Chain of pending modifications.  */
  struct undo *frees;		/* Chain of records available for reuse.  */
  rtx other_insn;		/* Another insn modified; must be re-verified.  */
};

static struct undobuf undobuf;
335
230d793d
RS
336/* Number of times the pseudo being substituted for
337 was found and replaced. */
338
339static int n_occurrences;
340
83d2b3b9 341static void do_SUBST PARAMS ((rtx *, rtx));
0345195a
RK
342static void do_SUBST_INT PARAMS ((unsigned int *,
343 unsigned int));
83d2b3b9
KG
344static void init_reg_last_arrays PARAMS ((void));
345static void setup_incoming_promotions PARAMS ((void));
346static void set_nonzero_bits_and_sign_copies PARAMS ((rtx, rtx, void *));
c3410241 347static int cant_combine_insn_p PARAMS ((rtx));
83d2b3b9
KG
348static int can_combine_p PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
349static int sets_function_arg_p PARAMS ((rtx));
350static int combinable_i3pat PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
351static int contains_muldiv PARAMS ((rtx));
44a76fc8 352static rtx try_combine PARAMS ((rtx, rtx, rtx, int *));
83d2b3b9
KG
353static void undo_all PARAMS ((void));
354static void undo_commit PARAMS ((void));
355static rtx *find_split_point PARAMS ((rtx *, rtx));
356static rtx subst PARAMS ((rtx, rtx, rtx, int, int));
357static rtx combine_simplify_rtx PARAMS ((rtx, enum machine_mode, int, int));
358static rtx simplify_if_then_else PARAMS ((rtx));
359static rtx simplify_set PARAMS ((rtx));
360static rtx simplify_logical PARAMS ((rtx, int));
361static rtx expand_compound_operation PARAMS ((rtx));
362static rtx expand_field_assignment PARAMS ((rtx));
770ae6cc
RK
363static rtx make_extraction PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
364 rtx, unsigned HOST_WIDE_INT, int,
365 int, int));
83d2b3b9
KG
366static rtx extract_left_shift PARAMS ((rtx, int));
367static rtx make_compound_operation PARAMS ((rtx, enum rtx_code));
770ae6cc
RK
368static int get_pos_from_mask PARAMS ((unsigned HOST_WIDE_INT,
369 unsigned HOST_WIDE_INT *));
83d2b3b9
KG
370static rtx force_to_mode PARAMS ((rtx, enum machine_mode,
371 unsigned HOST_WIDE_INT, rtx, int));
372static rtx if_then_else_cond PARAMS ((rtx, rtx *, rtx *));
373static rtx known_cond PARAMS ((rtx, enum rtx_code, rtx, rtx));
374static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
375static rtx make_field_assignment PARAMS ((rtx));
376static rtx apply_distributive_law PARAMS ((rtx));
377static rtx simplify_and_const_int PARAMS ((rtx, enum machine_mode, rtx,
378 unsigned HOST_WIDE_INT));
379static unsigned HOST_WIDE_INT nonzero_bits PARAMS ((rtx, enum machine_mode));
770ae6cc 380static unsigned int num_sign_bit_copies PARAMS ((rtx, enum machine_mode));
83d2b3b9
KG
381static int merge_outer_ops PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
382 enum rtx_code, HOST_WIDE_INT,
383 enum machine_mode, int *));
384static rtx simplify_shift_const PARAMS ((rtx, enum rtx_code, enum machine_mode,
385 rtx, int));
386static int recog_for_combine PARAMS ((rtx *, rtx, rtx *));
387static rtx gen_lowpart_for_combine PARAMS ((enum machine_mode, rtx));
83d2b3b9
KG
388static rtx gen_binary PARAMS ((enum rtx_code, enum machine_mode,
389 rtx, rtx));
83d2b3b9 390static enum rtx_code simplify_comparison PARAMS ((enum rtx_code, rtx *, rtx *));
83d2b3b9
KG
391static void update_table_tick PARAMS ((rtx));
392static void record_value_for_reg PARAMS ((rtx, rtx, rtx));
393static void check_promoted_subreg PARAMS ((rtx, rtx));
394static void record_dead_and_set_regs_1 PARAMS ((rtx, rtx, void *));
395static void record_dead_and_set_regs PARAMS ((rtx));
396static int get_last_value_validate PARAMS ((rtx *, rtx, int, int));
397static rtx get_last_value PARAMS ((rtx));
398static int use_crosses_set_p PARAMS ((rtx, int));
399static void reg_dead_at_p_1 PARAMS ((rtx, rtx, void *));
400static int reg_dead_at_p PARAMS ((rtx, rtx));
401static void move_deaths PARAMS ((rtx, rtx, int, rtx, rtx *));
402static int reg_bitfield_target_p PARAMS ((rtx, rtx));
403static void distribute_notes PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
404static void distribute_links PARAMS ((rtx));
405static void mark_used_regs_combine PARAMS ((rtx));
406static int insn_cuid PARAMS ((rtx));
c6991660 407static void record_promoted_value PARAMS ((rtx, rtx));
9a915772
JH
408static rtx reversed_comparison PARAMS ((rtx, enum machine_mode, rtx, rtx));
409static enum rtx_code combine_reversed_comparison_code PARAMS ((rtx));
230d793d 410\f
76095e2f
RH
411/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
412 insn. The substitution can be undone by undo_all. If INTO is already
413 set to NEWVAL, do not record this change. Because computing NEWVAL might
414 also call SUBST, we have to compute it before we put anything into
415 the undo table. */
416
417static void
663522cb 418do_SUBST (into, newval)
76095e2f
RH
419 rtx *into, newval;
420{
421 struct undo *buf;
422 rtx oldval = *into;
423
424 if (oldval == newval)
425 return;
426
427 if (undobuf.frees)
428 buf = undobuf.frees, undobuf.frees = buf->next;
429 else
430 buf = (struct undo *) xmalloc (sizeof (struct undo));
431
432 buf->is_int = 0;
433 buf->where.r = into;
434 buf->old_contents.r = oldval;
435 *into = newval;
436
437 buf->next = undobuf.undos, undobuf.undos = buf;
438}
439
440#define SUBST(INTO, NEWVAL) do_SUBST(&(INTO), (NEWVAL))
441
442/* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
443 for the value of a HOST_WIDE_INT value (including CONST_INT) is
444 not safe. */
445
446static void
663522cb 447do_SUBST_INT (into, newval)
0345195a 448 unsigned int *into, newval;
76095e2f
RH
449{
450 struct undo *buf;
0345195a 451 unsigned int oldval = *into;
76095e2f
RH
452
453 if (oldval == newval)
454 return;
455
456 if (undobuf.frees)
457 buf = undobuf.frees, undobuf.frees = buf->next;
458 else
459 buf = (struct undo *) xmalloc (sizeof (struct undo));
460
461 buf->is_int = 1;
462 buf->where.i = into;
463 buf->old_contents.i = oldval;
464 *into = newval;
465
466 buf->next = undobuf.undos, undobuf.undos = buf;
467}
468
469#define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL))
470\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return non-zero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (f, nregs)
     rtx f;
     unsigned int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  int new_direct_jump_p = 0;

  /* Reset the per-function statistics counters.  */
  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  /* Allocate the per-register tables used throughout this pass.
     The known-bits tables are zero-initialized; the rest are cleared
     by init_reg_last_arrays below.  */
  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
  reg_sign_bit_copies
    = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* FIRST PASS: compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (refresh_blocks);
  need_refresh = 0;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  /* Auto-increment side effects modify registers without an
	     explicit SET, so note_stores does not see them.  */
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* SECOND PASS: now scan all the insns in forward order, attempting
     the actual combinations.  NEXT is set by try_combine when a
     combination succeeds and rescanning should resume elsewhere.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && BLOCK_HEAD (this_basic_block + 1) == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (INSN_P (insn))
	{
	  /* See if we know about function return values before this
	     insn based upon SUBREG flags.  */
	  check_promoted_subreg (insn, PATTERN (insn));

	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0),
				     NULL_RTX, &new_direct_jump_p)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    {
	      rtx link = XEXP (links, 0);

	      /* If the linked insn has been replaced by a note, then there
		 is no point in pursuing this chain any further.  */
	      if (GET_CODE (link) == NOTE)
		break;

	      for (nextlinks = LOG_LINKS (link);
		   nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0),
					prev, &new_direct_jump_p)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0),
				       &new_direct_jump_p)) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  /* Re-do global life analysis for blocks where combination caused
     registers to go dead at entry.  */
  if (need_refresh)
    {
      compute_bb_for_insn (get_max_uid ());
      update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
			PROP_DEATH_NOTES);
    }

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  /* Release any undo records still on the free list.  */
  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  /* Accumulate this function's statistics into the per-compilation totals.  */
  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}
ef026f91
RS
760
761/* Wipe the reg_last_xxx arrays in preparation for another pass. */
762
763static void
764init_reg_last_arrays ()
765{
770ae6cc 766 unsigned int nregs = combine_max_regno;
ef026f91 767
961192e1
JM
768 memset ((char *) reg_last_death, 0, nregs * sizeof (rtx));
769 memset ((char *) reg_last_set, 0, nregs * sizeof (rtx));
770 memset ((char *) reg_last_set_value, 0, nregs * sizeof (rtx));
771 memset ((char *) reg_last_set_table_tick, 0, nregs * sizeof (int));
772 memset ((char *) reg_last_set_label, 0, nregs * sizeof (int));
773 memset (reg_last_set_invalid, 0, nregs * sizeof (char));
774 memset ((char *) reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
775 memset ((char *) reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
776 memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
ef026f91 777}
230d793d 778\f
7988fd36
RK
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  unsigned int regno;
  rtx first_insn = get_insns ();

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      enum machine_mode mode;
      int unsignedp;
      rtx reg;

      /* Check whether this register can hold an incoming pointer
	 argument.  FUNCTION_ARG_REGNO_P tests outgoing register
	 numbers, so translate if necessary due to register windows.  */
      if (! FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno)))
	continue;

      reg = promoted_input_arg (regno, &mode, &unsignedp);
      if (reg == 0)
	continue;

      /* Record that REG arrives as an extension, in REG's own mode, of
	 an unknown value (represented by the CLOBBER) of mode MODE.  */
      record_value_for_reg
	(reg, first_insn,
	 gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
			GET_MODE (reg),
			gen_rtx_CLOBBER (mode, const0_rtx)));
    }
#endif
}
809\f
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set, data)
     rtx x;
     rtx set;
     void *data ATTRIBUTE_UNUSED;
{
  unsigned int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  /* The store tells us nothing: record "no information" --
	     all bits possibly nonzero, only one sign-bit copy.  */
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  /* Accumulate: OR in the new nonzero bits, and keep the
	     smallest sign-bit-copy count seen over all sets of X.  */
	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  /* Partial or unanalyzable store: fall back to "no information".  */
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
893\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred ATTRIBUTE_UNUSED;
     rtx succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  /* Nonzero if INSN, SUCC (when present) and I3 are consecutive
     active insns with nothing in between.  */
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3_pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 If the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
1192\f
956d6950
JL
1193/* Check if PAT is an insn - or a part of it - used to set up an
1194 argument for a function in a hard register. */
1195
1196static int
1197sets_function_arg_p (pat)
1198 rtx pat;
1199{
1200 int i;
1201 rtx inner_dest;
1202
1203 switch (GET_CODE (pat))
1204 {
1205 case INSN:
1206 return sets_function_arg_p (PATTERN (pat));
1207
1208 case PARALLEL:
1209 for (i = XVECLEN (pat, 0); --i >= 0;)
1210 if (sets_function_arg_p (XVECEXP (pat, 0, i)))
1211 return 1;
1212
1213 break;
1214
1215 case SET:
1216 inner_dest = SET_DEST (pat);
1217 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1218 || GET_CODE (inner_dest) == SUBREG
1219 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1220 inner_dest = XEXP (inner_dest, 0);
1221
1222 return (GET_CODE (inner_dest) == REG
1223 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1224 && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));
1d300e19
KG
1225
1226 default:
1227 break;
956d6950
JL
1228 }
1229
1230 return 0;
1231}
1232
230d793d
RS
1233/* LOC is the location within I3 that contains its pattern or the component
1234 of a PARALLEL of the pattern. We validate that it is valid for combining.
1235
1236 One problem is if I3 modifies its output, as opposed to replacing it
1237 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1238 so would produce an insn that is not equivalent to the original insns.
1239
1240 Consider:
1241
1242 (set (reg:DI 101) (reg:DI 100))
1243 (set (subreg:SI (reg:DI 101) 0) <foo>)
1244
1245 This is NOT equivalent to:
1246
1247 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
23190837 1248 (set (reg:DI 101) (reg:DI 100))])
230d793d
RS
1249
1250 Not only does this modify 100 (in which case it might still be valid
663522cb 1251 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
230d793d
RS
1252
1253 We can also run into a problem if I2 sets a register that I1
1254 uses and I1 gets directly substituted into I3 (not via I2). In that
1255 case, we would be getting the wrong value of I2DEST into I3, so we
1256 must reject the combination. This case occurs when I2 and I1 both
1257 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1258 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1259 of a SET must prevent combination from occurring.
1260
230d793d
RS
1261 Before doing the above check, we first try to expand a field assignment
1262 into a set of logical operations.
1263
1264 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1265 we place a register that is both set and used within I3. If more than one
1266 such register is detected, we fail.
1267
1268 Return 1 if the combination is valid, zero otherwise. */
1269
1270static int
1271combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1272 rtx i3;
1273 rtx *loc;
1274 rtx i2dest;
1275 rtx i1dest;
1276 int i1_not_in_src;
1277 rtx *pi3dest_killed;
1278{
1279 rtx x = *loc;
1280
1281 if (GET_CODE (x) == SET)
1282 {
1283 rtx set = expand_field_assignment (x);
1284 rtx dest = SET_DEST (set);
1285 rtx src = SET_SRC (set);
29a82058 1286 rtx inner_dest = dest;
663522cb 1287
29a82058
JL
1288#if 0
1289 rtx inner_src = src;
1290#endif
230d793d
RS
1291
1292 SUBST (*loc, set);
1293
1294 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1295 || GET_CODE (inner_dest) == SUBREG
1296 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1297 inner_dest = XEXP (inner_dest, 0);
1298
1299 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1300 was added. */
1301#if 0
1302 while (GET_CODE (inner_src) == STRICT_LOW_PART
1303 || GET_CODE (inner_src) == SUBREG
1304 || GET_CODE (inner_src) == ZERO_EXTRACT)
1305 inner_src = XEXP (inner_src, 0);
1306
1307 /* If it is better that two different modes keep two different pseudos,
1308 avoid combining them. This avoids producing the following pattern
1309 on a 386:
1310 (set (subreg:SI (reg/v:QI 21) 0)
1311 (lshiftrt:SI (reg/v:SI 20)
1312 (const_int 24)))
1313 If that were made, reload could not handle the pair of
1314 reg 20/21, since it would try to get any GENERAL_REGS
1315 but some of them don't handle QImode. */
1316
1317 if (rtx_equal_p (inner_src, i2dest)
1318 && GET_CODE (inner_dest) == REG
1319 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1320 return 0;
1321#endif
1322
1323 /* Check for the case where I3 modifies its output, as
1324 discussed above. */
1325 if ((inner_dest != dest
1326 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1327 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
956d6950 1328
53895717
BS
1329 /* This is the same test done in can_combine_p except we can't test
1330 all_adjacent; we don't have to, since this instruction will stay
1331 in place, thus we are not considering increasing the lifetime of
1332 INNER_DEST.
956d6950
JL
1333
1334 Also, if this insn sets a function argument, combining it with
1335 something that might need a spill could clobber a previous
1336 function argument; the all_adjacent test in can_combine_p also
1337 checks this; here, we do a more specific test for this case. */
663522cb 1338
230d793d 1339 || (GET_CODE (inner_dest) == REG
dfbe1b2f 1340 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
c448a43e 1341 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
53895717 1342 GET_MODE (inner_dest))))
230d793d
RS
1343 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1344 return 0;
1345
1346 /* If DEST is used in I3, it is being killed in this insn,
663522cb 1347 so record that for later.
36a9c2e9
JL
1348 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1349 STACK_POINTER_REGNUM, since these are always considered to be
1350 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
230d793d 1351 if (pi3dest_killed && GET_CODE (dest) == REG
36a9c2e9
JL
1352 && reg_referenced_p (dest, PATTERN (i3))
1353 && REGNO (dest) != FRAME_POINTER_REGNUM
6d7096b0
DE
1354#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1355 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1356#endif
36a9c2e9
JL
1357#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1358 && (REGNO (dest) != ARG_POINTER_REGNUM
1359 || ! fixed_regs [REGNO (dest)])
1360#endif
1361 && REGNO (dest) != STACK_POINTER_REGNUM)
230d793d
RS
1362 {
1363 if (*pi3dest_killed)
1364 return 0;
1365
1366 *pi3dest_killed = dest;
1367 }
1368 }
1369
1370 else if (GET_CODE (x) == PARALLEL)
1371 {
1372 int i;
1373
1374 for (i = 0; i < XVECLEN (x, 0); i++)
1375 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1376 i1_not_in_src, pi3dest_killed))
1377 return 0;
1378 }
1379
1380 return 1;
1381}
1382\f
14a774a9
RK
1383/* Return 1 if X is an arithmetic expression that contains a multiplication
1384 and division. We don't count multiplications by powers of two here. */
1385
1386static int
1387contains_muldiv (x)
1388 rtx x;
1389{
1390 switch (GET_CODE (x))
1391 {
1392 case MOD: case DIV: case UMOD: case UDIV:
1393 return 1;
1394
1395 case MULT:
1396 return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
1397 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
1398 default:
1399 switch (GET_RTX_CLASS (GET_CODE (x)))
1400 {
1401 case 'c': case '<': case '2':
1402 return contains_muldiv (XEXP (x, 0))
1403 || contains_muldiv (XEXP (x, 1));
1404
1405 case '1':
1406 return contains_muldiv (XEXP (x, 0));
1407
1408 default:
1409 return 0;
1410 }
1411 }
1412}
1413\f
c3410241
BS
1414/* Determine whether INSN can be used in a combination. Return nonzero if
1415 not. This is used in try_combine to detect early some cases where we
1416 can't perform combinations. */
1417
1418static int
1419cant_combine_insn_p (insn)
1420 rtx insn;
1421{
1422 rtx set;
1423 rtx src, dest;
23190837 1424
c3410241
BS
1425 /* If this isn't really an insn, we can't do anything.
1426 This can occur when flow deletes an insn that it has merged into an
1427 auto-increment address. */
1428 if (! INSN_P (insn))
1429 return 1;
1430
1431 /* Never combine loads and stores involving hard regs. The register
1432 allocator can usually handle such reg-reg moves by tying. If we allow
1433 the combiner to make substitutions of hard regs, we risk aborting in
1434 reload on machines that have SMALL_REGISTER_CLASSES.
1435 As an exception, we allow combinations involving fixed regs; these are
1436 not available to the register allocator so there's no risk involved. */
1437
1438 set = single_set (insn);
1439 if (! set)
1440 return 0;
1441 src = SET_SRC (set);
1442 dest = SET_DEST (set);
ad334b51
JH
1443 if (GET_CODE (src) == SUBREG)
1444 src = SUBREG_REG (src);
1445 if (GET_CODE (dest) == SUBREG)
1446 dest = SUBREG_REG (dest);
53895717
BS
1447 if (REG_P (src) && REG_P (dest)
1448 && ((REGNO (src) < FIRST_PSEUDO_REGISTER
1449 && ! fixed_regs[REGNO (src)])
1450 || (REGNO (dest) < FIRST_PSEUDO_REGISTER
1451 && ! fixed_regs[REGNO (dest)])))
c3410241 1452 return 1;
53895717 1453
c3410241
BS
1454 return 0;
1455}
1456
230d793d
RS
1457/* Try to combine the insns I1 and I2 into I3.
1458 Here I1 and I2 appear earlier than I3.
1459 I1 can be zero; then we combine just I2 into I3.
663522cb 1460
04956a1a 1461 If we are combining three insns and the resulting insn is not recognized,
230d793d
RS
1462 try splitting it into two insns. If that happens, I2 and I3 are retained
1463 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1464 are pseudo-deleted.
1465
663522cb 1466 Return 0 if the combination does not work. Then nothing is changed.
abe6e52f 1467 If we did the combination, return the insn at which combine should
663522cb
KH
1468 resume scanning.
1469
44a76fc8
AG
1470 Set NEW_DIRECT_JUMP_P to a non-zero value if try_combine creates a
1471 new direct jump instruction. */
230d793d
RS
1472
1473static rtx
44a76fc8 1474try_combine (i3, i2, i1, new_direct_jump_p)
230d793d 1475 register rtx i3, i2, i1;
44a76fc8 1476 register int *new_direct_jump_p;
230d793d 1477{
02359929 1478 /* New patterns for I3 and I2, respectively. */
230d793d
RS
1479 rtx newpat, newi2pat = 0;
1480 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1481 int added_sets_1, added_sets_2;
1482 /* Total number of SETs to put into I3. */
1483 int total_sets;
1484 /* Nonzero is I2's body now appears in I3. */
1485 int i2_is_used;
1486 /* INSN_CODEs for new I3, new I2, and user of condition code. */
6a651371 1487 int insn_code_number, i2_code_number = 0, other_code_number = 0;
230d793d
RS
1488 /* Contains I3 if the destination of I3 is used in its source, which means
1489 that the old life of I3 is being killed. If that usage is placed into
1490 I2 and not in I3, a REG_DEAD note must be made. */
1491 rtx i3dest_killed = 0;
1492 /* SET_DEST and SET_SRC of I2 and I1. */
1493 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1494 /* PATTERN (I2), or a copy of it in certain cases. */
1495 rtx i2pat;
1496 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
c4e861e8 1497 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
230d793d
RS
1498 int i1_feeds_i3 = 0;
1499 /* Notes that must be added to REG_NOTES in I3 and I2. */
1500 rtx new_i3_notes, new_i2_notes;
176c9e6b
JW
1501 /* Notes that we substituted I3 into I2 instead of the normal case. */
1502 int i3_subst_into_i2 = 0;
df7d75de
RK
1503 /* Notes that I1, I2 or I3 is a MULT operation. */
1504 int have_mult = 0;
230d793d
RS
1505
1506 int maxreg;
1507 rtx temp;
1508 register rtx link;
1509 int i;
1510
c3410241
BS
1511 /* Exit early if one of the insns involved can't be used for
1512 combinations. */
1513 if (cant_combine_insn_p (i3)
1514 || cant_combine_insn_p (i2)
1515 || (i1 && cant_combine_insn_p (i1))
1516 /* We also can't do anything if I3 has a
1517 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1518 libcall. */
ec35104c
JL
1519#if 0
1520 /* ??? This gives worse code, and appears to be unnecessary, since no
1521 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1522 || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1523#endif
663522cb 1524 )
230d793d
RS
1525 return 0;
1526
1527 combine_attempts++;
230d793d
RS
1528 undobuf.other_insn = 0;
1529
6e25d159
RK
1530 /* Reset the hard register usage information. */
1531 CLEAR_HARD_REG_SET (newpat_used_regs);
1532
230d793d
RS
1533 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1534 code below, set I1 to be the earlier of the two insns. */
1535 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1536 temp = i1, i1 = i2, i2 = temp;
1537
abe6e52f 1538 added_links_insn = 0;
137e889e 1539
230d793d 1540 /* First check for one important special-case that the code below will
c7be4f66 1541 not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
230d793d
RS
1542 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1543 we may be able to replace that destination with the destination of I3.
1544 This occurs in the common code where we compute both a quotient and
1545 remainder into a structure, in which case we want to do the computation
1546 directly into the structure to avoid register-register copies.
1547
c7be4f66
RK
1548 Note that this case handles both multiple sets in I2 and also
1549 cases where I2 has a number of CLOBBER or PARALLELs.
1550
230d793d
RS
1551 We make very conservative checks below and only try to handle the
1552 most common cases of this. For example, we only handle the case
1553 where I2 and I3 are adjacent to avoid making difficult register
1554 usage tests. */
1555
1556 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1557 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1558 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
230d793d
RS
1559 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1560 && GET_CODE (PATTERN (i2)) == PARALLEL
1561 && ! side_effects_p (SET_DEST (PATTERN (i3)))
5089e22e
RS
1562 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1563 below would need to check what is inside (and reg_overlap_mentioned_p
1564 doesn't support those codes anyway). Don't allow those destinations;
1565 the resulting insn isn't likely to be recognized anyway. */
1566 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1567 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
230d793d
RS
1568 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1569 SET_DEST (PATTERN (i3)))
1570 && next_real_insn (i2) == i3)
5089e22e
RS
1571 {
1572 rtx p2 = PATTERN (i2);
1573
1574 /* Make sure that the destination of I3,
1575 which we are going to substitute into one output of I2,
1576 is not used within another output of I2. We must avoid making this:
1577 (parallel [(set (mem (reg 69)) ...)
1578 (set (reg 69) ...)])
1579 which is not well-defined as to order of actions.
1580 (Besides, reload can't handle output reloads for this.)
1581
1582 The problem can also happen if the dest of I3 is a memory ref,
1583 if another dest in I2 is an indirect memory ref. */
1584 for (i = 0; i < XVECLEN (p2, 0); i++)
7ca919b7
RK
1585 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1586 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
5089e22e
RS
1587 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1588 SET_DEST (XVECEXP (p2, 0, i))))
1589 break;
230d793d 1590
5089e22e
RS
1591 if (i == XVECLEN (p2, 0))
1592 for (i = 0; i < XVECLEN (p2, 0); i++)
481c7efa
FS
1593 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1594 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1595 && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
5089e22e
RS
1596 {
1597 combine_merges++;
230d793d 1598
5089e22e
RS
1599 subst_insn = i3;
1600 subst_low_cuid = INSN_CUID (i2);
230d793d 1601
c4e861e8 1602 added_sets_2 = added_sets_1 = 0;
5089e22e 1603 i2dest = SET_SRC (PATTERN (i3));
230d793d 1604
5089e22e
RS
1605 /* Replace the dest in I2 with our dest and make the resulting
1606 insn the new pattern for I3. Then skip to where we
1607 validate the pattern. Everything was set up above. */
663522cb 1608 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
5089e22e
RS
1609 SET_DEST (PATTERN (i3)));
1610
1611 newpat = p2;
176c9e6b 1612 i3_subst_into_i2 = 1;
5089e22e
RS
1613 goto validate_replacement;
1614 }
1615 }
230d793d 1616
667c1c2c
RK
1617 /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1618 one of those words to another constant, merge them by making a new
1619 constant. */
1620 if (i1 == 0
1621 && (temp = single_set (i2)) != 0
1622 && (GET_CODE (SET_SRC (temp)) == CONST_INT
1623 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1624 && GET_CODE (SET_DEST (temp)) == REG
1625 && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1626 && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1627 && GET_CODE (PATTERN (i3)) == SET
1628 && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1629 && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1630 && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1631 && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1632 && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1633 {
1634 HOST_WIDE_INT lo, hi;
1635
1636 if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1637 lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1638 else
1639 {
1640 lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1641 hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1642 }
1643
1644 if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
48b4d901
AO
1645 {
1646 /* We don't handle the case of the target word being wider
1647 than a host wide int. */
1648 if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
1649 abort ();
1650
42a6ff51 1651 lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
48b4d901
AO
1652 lo |= INTVAL (SET_SRC (PATTERN (i3)));
1653 }
1654 else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
667c1c2c 1655 hi = INTVAL (SET_SRC (PATTERN (i3)));
48b4d901
AO
1656 else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1657 {
1658 int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1659 >> (HOST_BITS_PER_WIDE_INT - 1));
1660
42a6ff51
AO
1661 lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1662 (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1663 lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1664 (INTVAL (SET_SRC (PATTERN (i3)))));
48b4d901
AO
1665 if (hi == sign)
1666 hi = lo < 0 ? -1 : 0;
1667 }
1668 else
1669 /* We don't handle the case of the higher word not fitting
1670 entirely in either hi or lo. */
1671 abort ();
667c1c2c
RK
1672
1673 combine_merges++;
1674 subst_insn = i3;
1675 subst_low_cuid = INSN_CUID (i2);
1676 added_sets_2 = added_sets_1 = 0;
1677 i2dest = SET_DEST (temp);
1678
1679 SUBST (SET_SRC (temp),
1680 immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1681
1682 newpat = PATTERN (i2);
667c1c2c
RK
1683 goto validate_replacement;
1684 }
1685
230d793d
RS
1686#ifndef HAVE_cc0
1687 /* If we have no I1 and I2 looks like:
1688 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1689 (set Y OP)])
1690 make up a dummy I1 that is
1691 (set Y OP)
1692 and change I2 to be
1693 (set (reg:CC X) (compare:CC Y (const_int 0)))
1694
1695 (We can ignore any trailing CLOBBERs.)
1696
1697 This undoes a previous combination and allows us to match a branch-and-
1698 decrement insn. */
1699
1700 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1701 && XVECLEN (PATTERN (i2), 0) >= 2
1702 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1703 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1704 == MODE_CC)
1705 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1706 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1707 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1708 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1709 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1710 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1711 {
663522cb 1712 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
230d793d
RS
1713 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1714 break;
1715
1716 if (i == 1)
1717 {
1718 /* We make I1 with the same INSN_UID as I2. This gives it
1719 the same INSN_CUID for value tracking. Our fake I1 will
1720 never appear in the insn stream so giving it the same INSN_UID
1721 as I2 will not cause a problem. */
1722
0d9641d1 1723 subst_prev_insn = i1
38a448ca
RH
1724 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1725 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1726 NULL_RTX);
230d793d
RS
1727
1728 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1729 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1730 SET_DEST (PATTERN (i1)));
1731 }
1732 }
1733#endif
1734
1735 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1736 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1737 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1738 {
1739 undo_all ();
1740 return 0;
1741 }
1742
1743 /* Record whether I2DEST is used in I2SRC and similarly for the other
1744 cases. Knowing this will help in register status updating below. */
1745 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1746 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1747 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1748
916f14f1 1749 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1750 in I2SRC. */
1751 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1752
1753 /* Ensure that I3's pattern can be the destination of combines. */
1754 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1755 i1 && i2dest_in_i1src && i1_feeds_i3,
1756 &i3dest_killed))
1757 {
1758 undo_all ();
1759 return 0;
1760 }
1761
df7d75de
RK
1762 /* See if any of the insns is a MULT operation. Unless one is, we will
1763 reject a combination that is, since it must be slower. Be conservative
1764 here. */
1765 if (GET_CODE (i2src) == MULT
1766 || (i1 != 0 && GET_CODE (i1src) == MULT)
1767 || (GET_CODE (PATTERN (i3)) == SET
1768 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1769 have_mult = 1;
1770
230d793d
RS
1771 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1772 We used to do this EXCEPT in one case: I3 has a post-inc in an
1773 output operand. However, that exception can give rise to insns like
23190837 1774 mov r3,(r3)+
230d793d 1775 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1776 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1777
1778#if 0
1779 if (!(GET_CODE (PATTERN (i3)) == SET
1780 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1781 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1782 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1783 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1784 /* It's not the exception. */
1785#endif
1786#ifdef AUTO_INC_DEC
1787 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1788 if (REG_NOTE_KIND (link) == REG_INC
1789 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1790 || (i1 != 0
1791 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1792 {
1793 undo_all ();
1794 return 0;
1795 }
1796#endif
1797
1798 /* See if the SETs in I1 or I2 need to be kept around in the merged
1799 instruction: whenever the value set there is still needed past I3.
1800 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1801
1802 For the SET in I1, we have two cases: If I1 and I2 independently
1803 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1804 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1805 in I1 needs to be kept around unless I1DEST dies or is set in either
1806 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1807 I1DEST. If so, we know I1 feeds into I2. */
1808
1809 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1810
1811 added_sets_1
1812 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1813 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1814
1815 /* If the set in I2 needs to be kept around, we must make a copy of
1816 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
5089e22e 1817 PATTERN (I2), we are only substituting for the original I1DEST, not into
230d793d
RS
1818 an already-substituted copy. This also prevents making self-referential
1819 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1820 I2DEST. */
1821
1822 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
38a448ca 1823 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
230d793d
RS
1824 : PATTERN (i2));
1825
1826 if (added_sets_2)
1827 i2pat = copy_rtx (i2pat);
1828
1829 combine_merges++;
1830
1831 /* Substitute in the latest insn for the regs set by the earlier ones. */
1832
1833 maxreg = max_reg_num ();
1834
1835 subst_insn = i3;
230d793d
RS
1836
1837 /* It is possible that the source of I2 or I1 may be performing an
1838 unneeded operation, such as a ZERO_EXTEND of something that is known
1839 to have the high part zero. Handle that case by letting subst look at
1840 the innermost one of them.
1841
1842 Another way to do this would be to have a function that tries to
1843 simplify a single insn instead of merging two or more insns. We don't
1844 do this because of the potential of infinite loops and because
1845 of the potential extra memory required. However, doing it the way
1846 we are is a bit of a kludge and doesn't catch all cases.
1847
1848 But only do this if -fexpensive-optimizations since it slows things down
1849 and doesn't usually win. */
1850
1851 if (flag_expensive_optimizations)
1852 {
1853 /* Pass pc_rtx so no substitutions are done, just simplifications.
1854 The cases that we are interested in here do not involve the few
 1855 cases were is_replaced is checked. */
1856 if (i1)
d0ab8cd3
RK
1857 {
1858 subst_low_cuid = INSN_CUID (i1);
1859 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1860 }
230d793d 1861 else
d0ab8cd3
RK
1862 {
1863 subst_low_cuid = INSN_CUID (i2);
1864 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1865 }
230d793d
RS
1866 }
1867
1868#ifndef HAVE_cc0
1869 /* Many machines that don't use CC0 have insns that can both perform an
1870 arithmetic operation and set the condition code. These operations will
1871 be represented as a PARALLEL with the first element of the vector
1872 being a COMPARE of an arithmetic operation with the constant zero.
1873 The second element of the vector will set some pseudo to the result
1874 of the same arithmetic operation. If we simplify the COMPARE, we won't
1875 match such a pattern and so will generate an extra insn. Here we test
1876 for this case, where both the comparison and the operation result are
1877 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1878 I2SRC. Later we will make the PARALLEL that contains I2. */
1879
1880 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1881 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1882 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1883 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1884 {
081f5e7e 1885#ifdef EXTRA_CC_MODES
230d793d
RS
1886 rtx *cc_use;
1887 enum machine_mode compare_mode;
081f5e7e 1888#endif
230d793d
RS
1889
1890 newpat = PATTERN (i3);
1891 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1892
1893 i2_is_used = 1;
1894
1895#ifdef EXTRA_CC_MODES
1896 /* See if a COMPARE with the operand we substituted in should be done
1897 with the mode that is currently being used. If not, do the same
1898 processing we do in `subst' for a SET; namely, if the destination
1899 is used only once, try to replace it with a register of the proper
1900 mode and also replace the COMPARE. */
1901 if (undobuf.other_insn == 0
1902 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1903 &undobuf.other_insn))
77fa0940
RK
1904 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1905 i2src, const0_rtx))
230d793d
RS
1906 != GET_MODE (SET_DEST (newpat))))
1907 {
770ae6cc 1908 unsigned int regno = REGNO (SET_DEST (newpat));
38a448ca 1909 rtx new_dest = gen_rtx_REG (compare_mode, regno);
230d793d
RS
1910
1911 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 1912 || (REG_N_SETS (regno) == 1 && ! added_sets_2
230d793d
RS
1913 && ! REG_USERVAR_P (SET_DEST (newpat))))
1914 {
1915 if (regno >= FIRST_PSEUDO_REGISTER)
1916 SUBST (regno_reg_rtx[regno], new_dest);
1917
1918 SUBST (SET_DEST (newpat), new_dest);
1919 SUBST (XEXP (*cc_use, 0), new_dest);
1920 SUBST (SET_SRC (newpat),
f1c6ba8b 1921 gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
230d793d
RS
1922 }
1923 else
1924 undobuf.other_insn = 0;
1925 }
663522cb 1926#endif
230d793d
RS
1927 }
1928 else
1929#endif
1930 {
1931 n_occurrences = 0; /* `subst' counts here */
1932
1933 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1934 need to make a unique copy of I2SRC each time we substitute it
1935 to avoid self-referential rtl. */
1936
d0ab8cd3 1937 subst_low_cuid = INSN_CUID (i2);
230d793d
RS
1938 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1939 ! i1_feeds_i3 && i1dest_in_i1src);
230d793d
RS
1940
1941 /* Record whether i2's body now appears within i3's body. */
1942 i2_is_used = n_occurrences;
1943 }
1944
1945 /* If we already got a failure, don't try to do more. Otherwise,
1946 try to substitute in I1 if we have it. */
1947
1948 if (i1 && GET_CODE (newpat) != CLOBBER)
1949 {
1950 /* Before we can do this substitution, we must redo the test done
1951 above (see detailed comments there) that ensures that I1DEST
0f41302f 1952 isn't mentioned in any SETs in NEWPAT that are field assignments. */
230d793d 1953
5f4f0e22 1954 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
6496a589 1955 0, (rtx*)0))
230d793d
RS
1956 {
1957 undo_all ();
1958 return 0;
1959 }
1960
1961 n_occurrences = 0;
d0ab8cd3 1962 subst_low_cuid = INSN_CUID (i1);
230d793d 1963 newpat = subst (newpat, i1dest, i1src, 0, 0);
230d793d
RS
1964 }
1965
916f14f1
RK
1966 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1967 to count all the ways that I2SRC and I1SRC can be used. */
5f4f0e22 1968 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1969 && i2_is_used + added_sets_2 > 1)
5f4f0e22 1970 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
916f14f1
RK
1971 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1972 > 1))
230d793d
RS
1973 /* Fail if we tried to make a new register (we used to abort, but there's
1974 really no reason to). */
1975 || max_reg_num () != maxreg
1976 /* Fail if we couldn't do something and have a CLOBBER. */
df7d75de
RK
1977 || GET_CODE (newpat) == CLOBBER
1978 /* Fail if this new pattern is a MULT and we didn't have one before
1979 at the outer level. */
1980 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1981 && ! have_mult))
230d793d
RS
1982 {
1983 undo_all ();
1984 return 0;
1985 }
1986
1987 /* If the actions of the earlier insns must be kept
1988 in addition to substituting them into the latest one,
1989 we must make a new PARALLEL for the latest insn
 1990 to hold the additional SETs. */
1991
1992 if (added_sets_1 || added_sets_2)
1993 {
1994 combine_extras++;
1995
1996 if (GET_CODE (newpat) == PARALLEL)
1997 {
1998 rtvec old = XVEC (newpat, 0);
1999 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
38a448ca 2000 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
d38a30c9
KG
2001 memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
2002 sizeof (old->elem[0]) * old->num_elem);
230d793d
RS
2003 }
2004 else
2005 {
2006 rtx old = newpat;
2007 total_sets = 1 + added_sets_1 + added_sets_2;
38a448ca 2008 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
230d793d
RS
2009 XVECEXP (newpat, 0, 0) = old;
2010 }
2011
2012 if (added_sets_1)
2013 XVECEXP (newpat, 0, --total_sets)
2014 = (GET_CODE (PATTERN (i1)) == PARALLEL
38a448ca 2015 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
230d793d
RS
2016
2017 if (added_sets_2)
c5c76735
JL
2018 {
2019 /* If there is no I1, use I2's body as is. We used to also not do
2020 the subst call below if I2 was substituted into I3,
2021 but that could lose a simplification. */
2022 if (i1 == 0)
2023 XVECEXP (newpat, 0, --total_sets) = i2pat;
2024 else
2025 /* See comment where i2pat is assigned. */
2026 XVECEXP (newpat, 0, --total_sets)
2027 = subst (i2pat, i1dest, i1src, 0, 0);
2028 }
230d793d
RS
2029 }
2030
2031 /* We come here when we are replacing a destination in I2 with the
2032 destination of I3. */
2033 validate_replacement:
2034
6e25d159
RK
2035 /* Note which hard regs this insn has as inputs. */
2036 mark_used_regs_combine (newpat);
2037
230d793d 2038 /* Is the result of combination a valid instruction? */
8e2f6e35 2039 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2040
2041 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2042 the second SET's destination is a register that is unused. In that case,
2043 we just need the first SET. This can occur when simplifying a divmod
2044 insn. We *must* test for this case here because the code below that
2045 splits two independent SETs doesn't handle this case correctly when it
2046 updates the register status. Also check the case where the first
2047 SET's destination is unused. That would not cause incorrect code, but
2048 does cause an unneeded insn to remain. */
2049
2050 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2051 && XVECLEN (newpat, 0) == 2
2052 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2053 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2054 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
2055 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
2056 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
2057 && asm_noperands (newpat) < 0)
2058 {
2059 newpat = XVECEXP (newpat, 0, 0);
8e2f6e35 2060 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2061 }
2062
2063 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2064 && XVECLEN (newpat, 0) == 2
2065 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2066 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2067 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
2068 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
2069 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
2070 && asm_noperands (newpat) < 0)
2071 {
2072 newpat = XVECEXP (newpat, 0, 1);
8e2f6e35 2073 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2074 }
2075
2076 /* If we were combining three insns and the result is a simple SET
2077 with no ASM_OPERANDS that wasn't recognized, try to split it into two
663522cb 2078 insns. There are two ways to do this. It can be split using a
916f14f1
RK
2079 machine-specific method (like when you have an addition of a large
2080 constant) or by combine in the function find_split_point. */
2081
230d793d
RS
2082 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2083 && asm_noperands (newpat) < 0)
2084 {
916f14f1 2085 rtx m_split, *split;
42495ca0 2086 rtx ni2dest = i2dest;
916f14f1
RK
2087
2088 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
2089 use I2DEST as a scratch register will help. In the latter case,
2090 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1
RK
2091
2092 m_split = split_insns (newpat, i3);
a70c61d9
JW
2093
2094 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2095 inputs of NEWPAT. */
2096
2097 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2098 possible to try that as a scratch reg. This would require adding
2099 more code to make it work though. */
2100
2101 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
42495ca0
RK
2102 {
2103 /* If I2DEST is a hard register or the only use of a pseudo,
2104 we can change its mode. */
2105 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 2106 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 2107 && GET_CODE (i2dest) == REG
42495ca0 2108 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 2109 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
42495ca0 2110 && ! REG_USERVAR_P (i2dest))))
38a448ca 2111 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
c5c76735
JL
2112 REGNO (i2dest));
2113
2114 m_split = split_insns (gen_rtx_PARALLEL
2115 (VOIDmode,
2116 gen_rtvec (2, newpat,
2117 gen_rtx_CLOBBER (VOIDmode,
2118 ni2dest))),
2119 i3);
5dd3e650
R
2120 /* If the split with the mode-changed register didn't work, try
2121 the original register. */
2122 if (! m_split && ni2dest != i2dest)
c7ca5912
RK
2123 {
2124 ni2dest = i2dest;
2125 m_split = split_insns (gen_rtx_PARALLEL
2126 (VOIDmode,
2127 gen_rtvec (2, newpat,
2128 gen_rtx_CLOBBER (VOIDmode,
2129 i2dest))),
2130 i3);
2131 }
42495ca0 2132 }
916f14f1 2133
d340408c
RH
2134 if (m_split && GET_CODE (m_split) != SEQUENCE)
2135 {
2136 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2137 if (insn_code_number >= 0)
2138 newpat = m_split;
23190837 2139 }
d340408c
RH
2140 else if (m_split && GET_CODE (m_split) == SEQUENCE
2141 && XVECLEN (m_split, 0) == 2
2142 && (next_real_insn (i2) == i3
2143 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
2144 INSN_CUID (i2))))
916f14f1 2145 {
1a26b032 2146 rtx i2set, i3set;
d0ab8cd3 2147 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 2148 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 2149
e4ba89be
RK
2150 i3set = single_set (XVECEXP (m_split, 0, 1));
2151 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 2152
42495ca0
RK
2153 /* In case we changed the mode of I2DEST, replace it in the
2154 pseudo-register table here. We can't do it above in case this
2155 code doesn't get executed and we do a split the other way. */
2156
2157 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2158 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2159
8e2f6e35 2160 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1a26b032
RK
2161
2162 /* If I2 or I3 has multiple SETs, we won't know how to track
9cc96794
RK
2163 register status, so don't use these insns. If I2's destination
2164 is used between I2 and I3, we also can't use these insns. */
1a26b032 2165
9cc96794
RK
2166 if (i2_code_number >= 0 && i2set && i3set
2167 && (next_real_insn (i2) == i3
2168 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
8e2f6e35
BS
2169 insn_code_number = recog_for_combine (&newi3pat, i3,
2170 &new_i3_notes);
d0ab8cd3
RK
2171 if (insn_code_number >= 0)
2172 newpat = newi3pat;
2173
c767f54b 2174 /* It is possible that both insns now set the destination of I3.
22609cbf 2175 If so, we must show an extra use of it. */
c767f54b 2176
393de53f
RK
2177 if (insn_code_number >= 0)
2178 {
2179 rtx new_i3_dest = SET_DEST (i3set);
2180 rtx new_i2_dest = SET_DEST (i2set);
2181
2182 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2183 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2184 || GET_CODE (new_i3_dest) == SUBREG)
2185 new_i3_dest = XEXP (new_i3_dest, 0);
2186
d4096689
RK
2187 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2188 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2189 || GET_CODE (new_i2_dest) == SUBREG)
2190 new_i2_dest = XEXP (new_i2_dest, 0);
2191
393de53f
RK
2192 if (GET_CODE (new_i3_dest) == REG
2193 && GET_CODE (new_i2_dest) == REG
2194 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
b1f21e0a 2195 REG_N_SETS (REGNO (new_i2_dest))++;
393de53f 2196 }
916f14f1 2197 }
230d793d
RS
2198
2199 /* If we can split it and use I2DEST, go ahead and see if that
2200 helps things be recognized. Verify that none of the registers
2201 are set between I2 and I3. */
d0ab8cd3 2202 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d
RS
2203#ifdef HAVE_cc0
2204 && GET_CODE (i2dest) == REG
2205#endif
2206 /* We need I2DEST in the proper mode. If it is a hard register
2207 or the only use of a pseudo, we can change its mode. */
2208 && (GET_MODE (*split) == GET_MODE (i2dest)
2209 || GET_MODE (*split) == VOIDmode
2210 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 2211 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
230d793d
RS
2212 && ! REG_USERVAR_P (i2dest)))
2213 && (next_real_insn (i2) == i3
2214 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2215 /* We can't overwrite I2DEST if its value is still used by
2216 NEWPAT. */
2217 && ! reg_referenced_p (i2dest, newpat))
2218 {
2219 rtx newdest = i2dest;
df7d75de
RK
2220 enum rtx_code split_code = GET_CODE (*split);
2221 enum machine_mode split_mode = GET_MODE (*split);
230d793d
RS
2222
2223 /* Get NEWDEST as a register in the proper mode. We have already
2224 validated that we can do this. */
df7d75de 2225 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
230d793d 2226 {
38a448ca 2227 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
230d793d
RS
2228
2229 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2230 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2231 }
2232
2233 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2234 an ASHIFT. This can occur if it was inside a PLUS and hence
2235 appeared to be a memory address. This is a kludge. */
df7d75de 2236 if (split_code == MULT
230d793d 2237 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1568d79b 2238 && INTVAL (XEXP (*split, 1)) > 0
230d793d 2239 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1dc8a823 2240 {
f1c6ba8b
RK
2241 SUBST (*split, gen_rtx_ASHIFT (split_mode,
2242 XEXP (*split, 0), GEN_INT (i)));
1dc8a823
JW
2243 /* Update split_code because we may not have a multiply
2244 anymore. */
2245 split_code = GET_CODE (*split);
2246 }
230d793d
RS
2247
2248#ifdef INSN_SCHEDULING
2249 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2250 be written as a ZERO_EXTEND. */
df7d75de 2251 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
f1c6ba8b 2252 SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
ddef6bc7 2253 SUBREG_REG (*split)));
230d793d
RS
2254#endif
2255
f1c6ba8b 2256 newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
230d793d 2257 SUBST (*split, newdest);
8e2f6e35 2258 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
df7d75de
RK
2259
2260 /* If the split point was a MULT and we didn't have one before,
2261 don't use one now. */
2262 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
8e2f6e35 2263 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2264 }
2265 }
2266
2267 /* Check for a case where we loaded from memory in a narrow mode and
2268 then sign extended it, but we need both registers. In that case,
2269 we have a PARALLEL with both loads from the same memory location.
2270 We can split this into a load from memory followed by a register-register
2271 copy. This saves at least one insn, more if register allocation can
f0343c74
RK
2272 eliminate the copy.
2273
2274 We cannot do this if the destination of the second assignment is
2275 a register that we have already assumed is zero-extended. Similarly
2276 for a SUBREG of such a register. */
230d793d
RS
2277
2278 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2279 && GET_CODE (newpat) == PARALLEL
2280 && XVECLEN (newpat, 0) == 2
2281 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2282 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2283 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2284 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2285 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2286 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2287 INSN_CUID (i2))
2288 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2289 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
f0343c74
RK
2290 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2291 (GET_CODE (temp) == REG
2292 && reg_nonzero_bits[REGNO (temp)] != 0
2293 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2294 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2295 && (reg_nonzero_bits[REGNO (temp)]
2296 != GET_MODE_MASK (word_mode))))
2297 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2298 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2299 (GET_CODE (temp) == REG
2300 && reg_nonzero_bits[REGNO (temp)] != 0
2301 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2302 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2303 && (reg_nonzero_bits[REGNO (temp)]
2304 != GET_MODE_MASK (word_mode)))))
230d793d
RS
2305 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2306 SET_SRC (XVECEXP (newpat, 0, 1)))
2307 && ! find_reg_note (i3, REG_UNUSED,
2308 SET_DEST (XVECEXP (newpat, 0, 0))))
2309 {
472fbdd1
RK
2310 rtx ni2dest;
2311
230d793d 2312 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 2313 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
2314 newpat = XVECEXP (newpat, 0, 1);
2315 SUBST (SET_SRC (newpat),
472fbdd1 2316 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
8e2f6e35 2317 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 2318
230d793d 2319 if (i2_code_number >= 0)
8e2f6e35 2320 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
5089e22e
RS
2321
2322 if (insn_code_number >= 0)
2323 {
2324 rtx insn;
2325 rtx link;
2326
2327 /* If we will be able to accept this, we have made a change to the
2328 destination of I3. This can invalidate a LOG_LINKS pointing
2329 to I3. No other part of combine.c makes such a transformation.
2330
2331 The new I3 will have a destination that was previously the
2332 destination of I1 or I2 and which was used in i2 or I3. Call
2333 distribute_links to make a LOG_LINK from the next use of
2334 that destination. */
2335
2336 PATTERN (i3) = newpat;
38a448ca 2337 distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
5089e22e
RS
2338
2339 /* I3 now uses what used to be its destination and which is
2340 now I2's destination. That means we need a LOG_LINK from
2341 I3 to I2. But we used to have one, so we still will.
2342
2343 However, some later insn might be using I2's dest and have
2344 a LOG_LINK pointing at I3. We must remove this link.
2345 The simplest way to remove the link is to point it at I1,
2346 which we know will be a NOTE. */
2347
2348 for (insn = NEXT_INSN (i3);
0d4d42c3 2349 insn && (this_basic_block == n_basic_blocks - 1
3b413743 2350 || insn != BLOCK_HEAD (this_basic_block + 1));
5089e22e
RS
2351 insn = NEXT_INSN (insn))
2352 {
2c3c49de 2353 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
5089e22e
RS
2354 {
2355 for (link = LOG_LINKS (insn); link;
2356 link = XEXP (link, 1))
2357 if (XEXP (link, 0) == i3)
2358 XEXP (link, 0) = i1;
2359
2360 break;
2361 }
2362 }
2363 }
230d793d 2364 }
663522cb 2365
230d793d
RS
2366 /* Similarly, check for a case where we have a PARALLEL of two independent
2367 SETs but we started with three insns. In this case, we can do the sets
2368 as two separate insns. This case occurs when some SET allows two
2369 other insns to combine, but the destination of that SET is still live. */
2370
2371 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2372 && GET_CODE (newpat) == PARALLEL
2373 && XVECLEN (newpat, 0) == 2
2374 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2375 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2376 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2377 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2378 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2379 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2380 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2381 INSN_CUID (i2))
2382 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2383 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2384 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2385 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2386 XVECEXP (newpat, 0, 0))
2387 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
14a774a9
RK
2388 XVECEXP (newpat, 0, 1))
2389 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2390 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
230d793d 2391 {
e9a25f70
JL
2392 /* Normally, it doesn't matter which of the two is done first,
2393 but it does if one references cc0. In that case, it has to
2394 be first. */
2395#ifdef HAVE_cc0
2396 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2397 {
2398 newi2pat = XVECEXP (newpat, 0, 0);
2399 newpat = XVECEXP (newpat, 0, 1);
2400 }
2401 else
2402#endif
2403 {
2404 newi2pat = XVECEXP (newpat, 0, 1);
2405 newpat = XVECEXP (newpat, 0, 0);
2406 }
230d793d 2407
8e2f6e35 2408 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 2409
230d793d 2410 if (i2_code_number >= 0)
8e2f6e35 2411 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2412 }
2413
2414 /* If it still isn't recognized, fail and change things back the way they
2415 were. */
2416 if ((insn_code_number < 0
2417 /* Is the result a reasonable ASM_OPERANDS? */
2418 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2419 {
2420 undo_all ();
2421 return 0;
2422 }
2423
2424 /* If we had to change another insn, make sure it is valid also. */
2425 if (undobuf.other_insn)
2426 {
230d793d
RS
2427 rtx other_pat = PATTERN (undobuf.other_insn);
2428 rtx new_other_notes;
2429 rtx note, next;
2430
6e25d159
RK
2431 CLEAR_HARD_REG_SET (newpat_used_regs);
2432
8e2f6e35
BS
2433 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2434 &new_other_notes);
230d793d
RS
2435
2436 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2437 {
2438 undo_all ();
2439 return 0;
2440 }
2441
2442 PATTERN (undobuf.other_insn) = other_pat;
2443
2444 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2445 are still valid. Then add any non-duplicate notes added by
2446 recog_for_combine. */
2447 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2448 {
2449 next = XEXP (note, 1);
2450
2451 if (REG_NOTE_KIND (note) == REG_UNUSED
2452 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1a26b032
RK
2453 {
2454 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2455 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
1a26b032
RK
2456
2457 remove_note (undobuf.other_insn, note);
2458 }
230d793d
RS
2459 }
2460
1a26b032
RK
2461 for (note = new_other_notes; note; note = XEXP (note, 1))
2462 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2463 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
1a26b032 2464
230d793d 2465 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 2466 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d 2467 }
5ef17dd2 2468#ifdef HAVE_cc0
663522cb 2469 /* If I2 is the setter CC0 and I3 is the user CC0 then check whether
5ef17dd2
CC
2470 they are adjacent to each other or not. */
2471 {
2472 rtx p = prev_nonnote_insn (i3);
663522cb
KH
2473 if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2474 && sets_cc0_p (newi2pat))
5ef17dd2 2475 {
663522cb
KH
2476 undo_all ();
2477 return 0;
5ef17dd2 2478 }
663522cb
KH
2479 }
2480#endif
230d793d 2481
663522cb 2482 /* We now know that we can do this combination. Merge the insns and
230d793d
RS
2483 update the status of registers and LOG_LINKS. */
2484
2485 {
2486 rtx i3notes, i2notes, i1notes = 0;
2487 rtx i3links, i2links, i1links = 0;
2488 rtx midnotes = 0;
770ae6cc 2489 unsigned int regno;
ff3467a9
JW
2490 /* Compute which registers we expect to eliminate. newi2pat may be setting
2491 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2492 same as i3dest, in which case newi2pat may be setting i1dest. */
2493 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2494 || i2dest_in_i2src || i2dest_in_i1src
230d793d 2495 ? 0 : i2dest);
ff3467a9
JW
2496 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2497 || (newi2pat && reg_set_p (i1dest, newi2pat))
2498 ? 0 : i1dest);
230d793d
RS
2499
2500 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2501 clear them. */
2502 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2503 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2504 if (i1)
2505 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2506
2507 /* Ensure that we do not have something that should not be shared but
2508 occurs multiple times in the new insns. Check this by first
5089e22e 2509	 resetting all the `used' flags and then copying anything that is shared.  */
230d793d
RS
2510
2511 reset_used_flags (i3notes);
2512 reset_used_flags (i2notes);
2513 reset_used_flags (i1notes);
2514 reset_used_flags (newpat);
2515 reset_used_flags (newi2pat);
2516 if (undobuf.other_insn)
2517 reset_used_flags (PATTERN (undobuf.other_insn));
2518
2519 i3notes = copy_rtx_if_shared (i3notes);
2520 i2notes = copy_rtx_if_shared (i2notes);
2521 i1notes = copy_rtx_if_shared (i1notes);
2522 newpat = copy_rtx_if_shared (newpat);
2523 newi2pat = copy_rtx_if_shared (newi2pat);
2524 if (undobuf.other_insn)
2525 reset_used_flags (PATTERN (undobuf.other_insn));
2526
2527 INSN_CODE (i3) = insn_code_number;
2528 PATTERN (i3) = newpat;
2529 if (undobuf.other_insn)
2530 INSN_CODE (undobuf.other_insn) = other_code_number;
2531
2532 /* We had one special case above where I2 had more than one set and
2533 we replaced a destination of one of those sets with the destination
2534 of I3. In that case, we have to update LOG_LINKS of insns later
176c9e6b
JW
2535 in this basic block. Note that this (expensive) case is rare.
2536
2537 Also, in this case, we must pretend that all REG_NOTEs for I2
2538 actually came from I3, so that REG_UNUSED notes from I2 will be
2539 properly handled. */
2540
c7be4f66 2541 if (i3_subst_into_i2)
176c9e6b 2542 {
1786009e 2543 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
95ac07b0
AO
2544 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2545 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1786009e
ZW
2546 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2547 && ! find_reg_note (i2, REG_UNUSED,
2548 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2549 for (temp = NEXT_INSN (i2);
2550 temp && (this_basic_block == n_basic_blocks - 1
2551 || BLOCK_HEAD (this_basic_block) != temp);
2552 temp = NEXT_INSN (temp))
2553 if (temp != i3 && INSN_P (temp))
2554 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2555 if (XEXP (link, 0) == i2)
2556 XEXP (link, 0) = i3;
176c9e6b
JW
2557
2558 if (i3notes)
2559 {
2560 rtx link = i3notes;
2561 while (XEXP (link, 1))
2562 link = XEXP (link, 1);
2563 XEXP (link, 1) = i2notes;
2564 }
2565 else
2566 i3notes = i2notes;
2567 i2notes = 0;
2568 }
230d793d
RS
2569
2570 LOG_LINKS (i3) = 0;
2571 REG_NOTES (i3) = 0;
2572 LOG_LINKS (i2) = 0;
2573 REG_NOTES (i2) = 0;
2574
2575 if (newi2pat)
2576 {
2577 INSN_CODE (i2) = i2_code_number;
2578 PATTERN (i2) = newi2pat;
2579 }
2580 else
2581 {
2582 PUT_CODE (i2, NOTE);
2583 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2584 NOTE_SOURCE_FILE (i2) = 0;
2585 }
2586
2587 if (i1)
2588 {
2589 LOG_LINKS (i1) = 0;
2590 REG_NOTES (i1) = 0;
2591 PUT_CODE (i1, NOTE);
2592 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2593 NOTE_SOURCE_FILE (i1) = 0;
2594 }
2595
2596 /* Get death notes for everything that is now used in either I3 or
663522cb 2597 I2 and used to die in a previous insn. If we built two new
6eb12cef
RK
2598 patterns, move from I1 to I2 then I2 to I3 so that we get the
2599 proper movement on registers that I2 modifies. */
230d793d 2600
230d793d 2601 if (newi2pat)
6eb12cef
RK
2602 {
2603 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2604 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2605 }
2606 else
2607 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2608 i3, &midnotes);
230d793d
RS
2609
2610 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2611 if (i3notes)
5f4f0e22
CH
2612 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2613 elim_i2, elim_i1);
230d793d 2614 if (i2notes)
5f4f0e22
CH
2615 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2616 elim_i2, elim_i1);
230d793d 2617 if (i1notes)
5f4f0e22
CH
2618 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2619 elim_i2, elim_i1);
230d793d 2620 if (midnotes)
5f4f0e22
CH
2621 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2622 elim_i2, elim_i1);
230d793d
RS
2623
2624 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2625 know these are REG_UNUSED and want them to go to the desired insn,
663522cb 2626 so we always pass it as i3. We have not counted the notes in
1a26b032
RK
2627 reg_n_deaths yet, so we need to do so now. */
2628
230d793d 2629 if (newi2pat && new_i2_notes)
1a26b032
RK
2630 {
2631 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2632 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2633 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
663522cb 2634
1a26b032
RK
2635 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2636 }
2637
230d793d 2638 if (new_i3_notes)
1a26b032
RK
2639 {
2640 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2641 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2642 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
663522cb 2643
1a26b032
RK
2644 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2645 }
230d793d
RS
2646
2647 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
e9a25f70
JL
2648 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2649 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2650 in that case, it might delete I2. Similarly for I2 and I1.
1a26b032
RK
2651 Show an additional death due to the REG_DEAD note we make here. If
2652 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 2653
230d793d 2654 if (i3dest_killed)
1a26b032
RK
2655 {
2656 if (GET_CODE (i3dest_killed) == REG)
b1f21e0a 2657 REG_N_DEATHS (REGNO (i3dest_killed))++;
1a26b032 2658
e9a25f70 2659 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
38a448ca
RH
2660 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2661 NULL_RTX),
ff3467a9 2662 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
e9a25f70 2663 else
38a448ca
RH
2664 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2665 NULL_RTX),
e9a25f70 2666 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
ff3467a9 2667 elim_i2, elim_i1);
1a26b032 2668 }
58c8c593 2669
230d793d 2670 if (i2dest_in_i2src)
58c8c593 2671 {
1a26b032 2672 if (GET_CODE (i2dest) == REG)
b1f21e0a 2673 REG_N_DEATHS (REGNO (i2dest))++;
1a26b032 2674
58c8c593 2675 if (newi2pat && reg_set_p (i2dest, newi2pat))
38a448ca 2676 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2677 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2678 else
38a448ca 2679 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2680 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2681 NULL_RTX, NULL_RTX);
2682 }
2683
230d793d 2684 if (i1dest_in_i1src)
58c8c593 2685 {
1a26b032 2686 if (GET_CODE (i1dest) == REG)
b1f21e0a 2687 REG_N_DEATHS (REGNO (i1dest))++;
1a26b032 2688
58c8c593 2689 if (newi2pat && reg_set_p (i1dest, newi2pat))
38a448ca 2690 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2691 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2692 else
38a448ca 2693 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2694 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2695 NULL_RTX, NULL_RTX);
2696 }
230d793d
RS
2697
2698 distribute_links (i3links);
2699 distribute_links (i2links);
2700 distribute_links (i1links);
2701
2702 if (GET_CODE (i2dest) == REG)
2703 {
d0ab8cd3
RK
2704 rtx link;
2705 rtx i2_insn = 0, i2_val = 0, set;
2706
2707 /* The insn that used to set this register doesn't exist, and
2708 this life of the register may not exist either. See if one of
663522cb 2709 I3's links points to an insn that sets I2DEST. If it does,
d0ab8cd3
RK
2710 that is now the last known value for I2DEST. If we don't update
2711 this and I2 set the register to a value that depended on its old
230d793d
RS
 2712	 contents, we will get confused.  If this insn is used, things
2713 will be set correctly in combine_instructions. */
d0ab8cd3
RK
2714
2715 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2716 if ((set = single_set (XEXP (link, 0))) != 0
2717 && rtx_equal_p (i2dest, SET_DEST (set)))
2718 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2719
2720 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
2721
2722 /* If the reg formerly set in I2 died only once and that was in I3,
2723 zero its use count so it won't make `reload' do any work. */
538fe8cd
ILT
2724 if (! added_sets_2
2725 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2726 && ! i2dest_in_i2src)
230d793d
RS
2727 {
2728 regno = REGNO (i2dest);
b1f21e0a 2729 REG_N_SETS (regno)--;
230d793d
RS
2730 }
2731 }
2732
2733 if (i1 && GET_CODE (i1dest) == REG)
2734 {
d0ab8cd3
RK
2735 rtx link;
2736 rtx i1_insn = 0, i1_val = 0, set;
2737
2738 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2739 if ((set = single_set (XEXP (link, 0))) != 0
2740 && rtx_equal_p (i1dest, SET_DEST (set)))
2741 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2742
2743 record_value_for_reg (i1dest, i1_insn, i1_val);
2744
230d793d 2745 regno = REGNO (i1dest);
5af91171 2746 if (! added_sets_1 && ! i1dest_in_i1src)
770ae6cc 2747 REG_N_SETS (regno)--;
230d793d
RS
2748 }
2749
951553af 2750 /* Update reg_nonzero_bits et al for any changes that may have been made
663522cb 2751 to this insn. The order of set_nonzero_bits_and_sign_copies() is
5fb7c247 2752 important. Because newi2pat can affect nonzero_bits of newpat */
22609cbf 2753 if (newi2pat)
84832317 2754 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
5fb7c247 2755 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
22609cbf 2756
44a76fc8
AG
2757 /* Set new_direct_jump_p if a new return or simple jump instruction
2758 has been created.
2759
663522cb 2760 If I3 is now an unconditional jump, ensure that it has a
230d793d 2761 BARRIER following it since it may have initially been a
381ee8af 2762 conditional jump. It may also be the last nonnote insn. */
663522cb 2763
7f1c097d 2764 if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3))
44a76fc8
AG
2765 {
2766 *new_direct_jump_p = 1;
230d793d 2767
44a76fc8
AG
2768 if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2769 || GET_CODE (temp) != BARRIER)
2770 emit_barrier_after (i3);
2771 }
230d793d
RS
2772 }
2773
2774 combine_successes++;
e7749837 2775 undo_commit ();
230d793d 2776
bcd49eb7
JW
2777 /* Clear this here, so that subsequent get_last_value calls are not
2778 affected. */
2779 subst_prev_insn = NULL_RTX;
2780
abe6e52f
RK
2781 if (added_links_insn
2782 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2783 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2784 return added_links_insn;
2785 else
2786 return newi2pat ? i2 : i3;
230d793d
RS
2787}
2788\f
2789/* Undo all the modifications recorded in undobuf. */
2790
2791static void
2792undo_all ()
2793{
241cea85
RK
2794 struct undo *undo, *next;
2795
2796 for (undo = undobuf.undos; undo; undo = next)
7c046e4e 2797 {
241cea85
RK
2798 next = undo->next;
2799 if (undo->is_int)
2800 *undo->where.i = undo->old_contents.i;
7c046e4e 2801 else
241cea85
RK
2802 *undo->where.r = undo->old_contents.r;
2803
2804 undo->next = undobuf.frees;
2805 undobuf.frees = undo;
7c046e4e 2806 }
230d793d 2807
f1c6ba8b 2808 undobuf.undos = 0;
bcd49eb7
JW
2809
2810 /* Clear this here, so that subsequent get_last_value calls are not
2811 affected. */
2812 subst_prev_insn = NULL_RTX;
230d793d 2813}
e7749837
RH
2814
2815/* We've committed to accepting the changes we made. Move all
2816 of the undos to the free list. */
2817
2818static void
2819undo_commit ()
2820{
2821 struct undo *undo, *next;
2822
2823 for (undo = undobuf.undos; undo; undo = next)
2824 {
2825 next = undo->next;
2826 undo->next = undobuf.frees;
2827 undobuf.frees = undo;
2828 }
f1c6ba8b 2829 undobuf.undos = 0;
e7749837
RH
2830}
2831
230d793d
RS
2832\f
2833/* Find the innermost point within the rtx at LOC, possibly LOC itself,
d0ab8cd3
RK
2834 where we have an arithmetic expression and return that point. LOC will
2835 be inside INSN.
230d793d
RS
2836
2837 try_combine will call this function to see if an insn can be split into
2838 two insns. */
2839
2840static rtx *
d0ab8cd3 2841find_split_point (loc, insn)
230d793d 2842 rtx *loc;
d0ab8cd3 2843 rtx insn;
230d793d
RS
2844{
2845 rtx x = *loc;
2846 enum rtx_code code = GET_CODE (x);
2847 rtx *split;
770ae6cc
RK
2848 unsigned HOST_WIDE_INT len = 0;
2849 HOST_WIDE_INT pos = 0;
2850 int unsignedp = 0;
6a651371 2851 rtx inner = NULL_RTX;
230d793d
RS
2852
2853 /* First special-case some codes. */
2854 switch (code)
2855 {
2856 case SUBREG:
2857#ifdef INSN_SCHEDULING
2858 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2859 point. */
2860 if (GET_CODE (SUBREG_REG (x)) == MEM)
2861 return loc;
2862#endif
d0ab8cd3 2863 return find_split_point (&SUBREG_REG (x), insn);
230d793d 2864
230d793d 2865 case MEM:
916f14f1 2866#ifdef HAVE_lo_sum
230d793d
RS
2867 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2868 using LO_SUM and HIGH. */
2869 if (GET_CODE (XEXP (x, 0)) == CONST
2870 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2871 {
2872 SUBST (XEXP (x, 0),
f1c6ba8b
RK
2873 gen_rtx_LO_SUM (Pmode,
2874 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
2875 XEXP (x, 0)));
230d793d
RS
2876 return &XEXP (XEXP (x, 0), 0);
2877 }
230d793d
RS
2878#endif
2879
916f14f1
RK
2880 /* If we have a PLUS whose second operand is a constant and the
2881 address is not valid, perhaps will can split it up using
2882 the machine-specific way to split large constants. We use
ddd5a7c1 2883 the first pseudo-reg (one of the virtual regs) as a placeholder;
916f14f1
RK
2884 it will not remain in the result. */
2885 if (GET_CODE (XEXP (x, 0)) == PLUS
2886 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2887 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2888 {
2889 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
38a448ca 2890 rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
916f14f1
RK
2891 subst_insn);
2892
2893 /* This should have produced two insns, each of which sets our
2894 placeholder. If the source of the second is a valid address,
2895 we can make put both sources together and make a split point
2896 in the middle. */
2897
2898 if (seq && XVECLEN (seq, 0) == 2
2899 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2900 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2901 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2902 && ! reg_mentioned_p (reg,
2903 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2904 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2905 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2906 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2907 && memory_address_p (GET_MODE (x),
2908 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2909 {
2910 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2911 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2912
2913 /* Replace the placeholder in SRC2 with SRC1. If we can
2914 find where in SRC2 it was placed, that can become our
2915 split point and we can replace this address with SRC2.
2916 Just try two obvious places. */
2917
2918 src2 = replace_rtx (src2, reg, src1);
2919 split = 0;
2920 if (XEXP (src2, 0) == src1)
2921 split = &XEXP (src2, 0);
2922 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2923 && XEXP (XEXP (src2, 0), 0) == src1)
2924 split = &XEXP (XEXP (src2, 0), 0);
2925
2926 if (split)
2927 {
2928 SUBST (XEXP (x, 0), src2);
2929 return split;
2930 }
2931 }
663522cb 2932
1a26b032
RK
2933 /* If that didn't work, perhaps the first operand is complex and
2934 needs to be computed separately, so make a split point there.
2935 This will occur on machines that just support REG + CONST
2936 and have a constant moved through some previous computation. */
2937
2938 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2939 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2940 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2941 == 'o')))
2942 return &XEXP (XEXP (x, 0), 0);
916f14f1
RK
2943 }
2944 break;
2945
230d793d
RS
2946 case SET:
2947#ifdef HAVE_cc0
2948 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2949 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2950 we need to put the operand into a register. So split at that
2951 point. */
2952
2953 if (SET_DEST (x) == cc0_rtx
2954 && GET_CODE (SET_SRC (x)) != COMPARE
2955 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2956 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2957 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2958 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2959 return &SET_SRC (x);
2960#endif
2961
2962 /* See if we can split SET_SRC as it stands. */
d0ab8cd3 2963 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2964 if (split && split != &SET_SRC (x))
2965 return split;
2966
041d7180
JL
2967 /* See if we can split SET_DEST as it stands. */
2968 split = find_split_point (&SET_DEST (x), insn);
2969 if (split && split != &SET_DEST (x))
2970 return split;
2971
230d793d
RS
2972 /* See if this is a bitfield assignment with everything constant. If
2973 so, this is an IOR of an AND, so split it into that. */
2974 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2975 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
5f4f0e22 2976 <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
2977 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2978 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2979 && GET_CODE (SET_SRC (x)) == CONST_INT
2980 && ((INTVAL (XEXP (SET_DEST (x), 1))
2981 + INTVAL (XEXP (SET_DEST (x), 2)))
2982 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2983 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2984 {
770ae6cc
RK
2985 HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
2986 unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
2987 unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
230d793d
RS
2988 rtx dest = XEXP (SET_DEST (x), 0);
2989 enum machine_mode mode = GET_MODE (dest);
5f4f0e22 2990 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
230d793d 2991
f76b9db2
ILT
2992 if (BITS_BIG_ENDIAN)
2993 pos = GET_MODE_BITSIZE (mode) - len - pos;
230d793d 2994
770ae6cc 2995 if (src == mask)
230d793d 2996 SUBST (SET_SRC (x),
5f4f0e22 2997 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
230d793d
RS
2998 else
2999 SUBST (SET_SRC (x),
3000 gen_binary (IOR, mode,
663522cb
KH
3001 gen_binary (AND, mode, dest,
3002 GEN_INT (~(mask << pos)
5f4f0e22
CH
3003 & GET_MODE_MASK (mode))),
3004 GEN_INT (src << pos)));
230d793d
RS
3005
3006 SUBST (SET_DEST (x), dest);
3007
d0ab8cd3 3008 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
3009 if (split && split != &SET_SRC (x))
3010 return split;
3011 }
3012
3013 /* Otherwise, see if this is an operation that we can split into two.
3014 If so, try to split that. */
3015 code = GET_CODE (SET_SRC (x));
3016
3017 switch (code)
3018 {
d0ab8cd3
RK
3019 case AND:
3020 /* If we are AND'ing with a large constant that is only a single
3021 bit and the result is only being used in a context where we
3022 need to know if it is zero or non-zero, replace it with a bit
3023 extraction. This will avoid the large constant, which might
3024 have taken more than one insn to make. If the constant were
3025 not a valid argument to the AND but took only one insn to make,
3026 this is no worse, but if it took more than one insn, it will
3027 be better. */
3028
3029 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3030 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
3031 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3032 && GET_CODE (SET_DEST (x)) == REG
6496a589 3033 && (split = find_single_use (SET_DEST (x), insn, (rtx*)0)) != 0
d0ab8cd3
RK
3034 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3035 && XEXP (*split, 0) == SET_DEST (x)
3036 && XEXP (*split, 1) == const0_rtx)
3037 {
76184def
DE
3038 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3039 XEXP (SET_SRC (x), 0),
3040 pos, NULL_RTX, 1, 1, 0, 0);
3041 if (extraction != 0)
3042 {
3043 SUBST (SET_SRC (x), extraction);
3044 return find_split_point (loc, insn);
3045 }
d0ab8cd3
RK
3046 }
3047 break;
3048
1a6ec070
RK
3049 case NE:
3050 /* if STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3051 is known to be on, this can be converted into a NEG of a shift. */
3052 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3053 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
4eb2cb10 3054 && 1 <= (pos = exact_log2
1a6ec070
RK
3055 (nonzero_bits (XEXP (SET_SRC (x), 0),
3056 GET_MODE (XEXP (SET_SRC (x), 0))))))
3057 {
3058 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3059
3060 SUBST (SET_SRC (x),
f1c6ba8b
RK
3061 gen_rtx_NEG (mode,
3062 gen_rtx_LSHIFTRT (mode,
3063 XEXP (SET_SRC (x), 0),
3064 GEN_INT (pos))));
1a6ec070
RK
3065
3066 split = find_split_point (&SET_SRC (x), insn);
3067 if (split && split != &SET_SRC (x))
3068 return split;
3069 }
3070 break;
3071
230d793d
RS
3072 case SIGN_EXTEND:
3073 inner = XEXP (SET_SRC (x), 0);
101c1a3d
JL
3074
3075 /* We can't optimize if either mode is a partial integer
3076 mode as we don't know how many bits are significant
3077 in those modes. */
3078 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3079 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3080 break;
3081
230d793d
RS
3082 pos = 0;
3083 len = GET_MODE_BITSIZE (GET_MODE (inner));
3084 unsignedp = 0;
3085 break;
3086
3087 case SIGN_EXTRACT:
3088 case ZERO_EXTRACT:
3089 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3090 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3091 {
3092 inner = XEXP (SET_SRC (x), 0);
3093 len = INTVAL (XEXP (SET_SRC (x), 1));
3094 pos = INTVAL (XEXP (SET_SRC (x), 2));
3095
f76b9db2
ILT
3096 if (BITS_BIG_ENDIAN)
3097 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
230d793d
RS
3098 unsignedp = (code == ZERO_EXTRACT);
3099 }
3100 break;
e9a25f70
JL
3101
3102 default:
3103 break;
230d793d
RS
3104 }
3105
3106 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3107 {
3108 enum machine_mode mode = GET_MODE (SET_SRC (x));
3109
d0ab8cd3
RK
3110 /* For unsigned, we have a choice of a shift followed by an
3111 AND or two shifts. Use two shifts for field sizes where the
3112 constant might be too large. We assume here that we can
3113 always at least get 8-bit constants in an AND insn, which is
3114 true for every current RISC. */
3115
3116 if (unsignedp && len <= 8)
230d793d
RS
3117 {
3118 SUBST (SET_SRC (x),
f1c6ba8b
RK
3119 gen_rtx_AND (mode,
3120 gen_rtx_LSHIFTRT
3121 (mode, gen_lowpart_for_combine (mode, inner),
3122 GEN_INT (pos)),
3123 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
230d793d 3124
d0ab8cd3 3125 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
3126 if (split && split != &SET_SRC (x))
3127 return split;
3128 }
3129 else
3130 {
3131 SUBST (SET_SRC (x),
f1c6ba8b 3132 gen_rtx_fmt_ee
d0ab8cd3 3133 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
f1c6ba8b
RK
3134 gen_rtx_ASHIFT (mode,
3135 gen_lowpart_for_combine (mode, inner),
3136 GEN_INT (GET_MODE_BITSIZE (mode)
3137 - len - pos)),
5f4f0e22 3138 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
230d793d 3139
d0ab8cd3 3140 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
3141 if (split && split != &SET_SRC (x))
3142 return split;
3143 }
3144 }
3145
3146 /* See if this is a simple operation with a constant as the second
3147 operand. It might be that this constant is out of range and hence
3148 could be used as a split point. */
3149 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3150 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3151 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3152 && CONSTANT_P (XEXP (SET_SRC (x), 1))
3153 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3154 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3155 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3156 == 'o'))))
3157 return &XEXP (SET_SRC (x), 1);
3158
3159 /* Finally, see if this is a simple operation with its first operand
3160 not in a register. The operation might require this operand in a
3161 register, so return it as a split point. We can always do this
3162 because if the first operand were another operation, we would have
3163 already found it as a split point. */
3164 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3165 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3166 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3167 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3168 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3169 return &XEXP (SET_SRC (x), 0);
3170
3171 return 0;
3172
3173 case AND:
3174 case IOR:
3175 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3176 it is better to write this as (not (ior A B)) so we can split it.
3177 Similarly for IOR. */
3178 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3179 {
3180 SUBST (*loc,
f1c6ba8b
RK
3181 gen_rtx_NOT (GET_MODE (x),
3182 gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3183 GET_MODE (x),
3184 XEXP (XEXP (x, 0), 0),
3185 XEXP (XEXP (x, 1), 0))));
d0ab8cd3 3186 return find_split_point (loc, insn);
230d793d
RS
3187 }
3188
3189 /* Many RISC machines have a large set of logical insns. If the
3190 second operand is a NOT, put it first so we will try to split the
3191 other operand first. */
3192 if (GET_CODE (XEXP (x, 1)) == NOT)
3193 {
3194 rtx tem = XEXP (x, 0);
3195 SUBST (XEXP (x, 0), XEXP (x, 1));
3196 SUBST (XEXP (x, 1), tem);
3197 }
3198 break;
e9a25f70
JL
3199
3200 default:
3201 break;
230d793d
RS
3202 }
3203
3204 /* Otherwise, select our actions depending on our rtx class. */
3205 switch (GET_RTX_CLASS (code))
3206 {
3207 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
3208 case '3':
d0ab8cd3 3209 split = find_split_point (&XEXP (x, 2), insn);
230d793d
RS
3210 if (split)
3211 return split;
0f41302f 3212 /* ... fall through ... */
230d793d
RS
3213 case '2':
3214 case 'c':
3215 case '<':
d0ab8cd3 3216 split = find_split_point (&XEXP (x, 1), insn);
230d793d
RS
3217 if (split)
3218 return split;
0f41302f 3219 /* ... fall through ... */
230d793d
RS
3220 case '1':
3221 /* Some machines have (and (shift ...) ...) insns. If X is not
3222 an AND, but XEXP (X, 0) is, use it as our split point. */
3223 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3224 return &XEXP (x, 0);
3225
d0ab8cd3 3226 split = find_split_point (&XEXP (x, 0), insn);
230d793d
RS
3227 if (split)
3228 return split;
3229 return loc;
3230 }
3231
3232 /* Otherwise, we don't have a split point. */
3233 return 0;
3234}
3235\f
/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that
   the caller can tell whether the result is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is non-zero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is non-zero.  */

static rtx
subst (x, from, to, in_dest, unique_copy)
     register rtx x, from, to;
     int in_dest;
     int unique_copy;
{
  register enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  register const char *fmt;
  register int len, i;
  rtx new;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)			\
  ((X) == (Y)						\
   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  /* Trivial case: X itself is FROM.  Copy TO if uniqueness is required
     and this is not the first occurrence.  */
  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they will
     not have been seen as equal above.  However, flow.c will make a
     LOG_LINKS entry for that case.  If we do nothing, we will try to
     rerecognize our original insn and, when it succeeds, we will
     delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.
     (A (clobber (const_int 0)) is the conventional "combination failed"
     marker throughout this function.)  */
  if (! in_dest && code == REG && GET_CODE (from) == REG
      && REGNO (x) == REGNO (from))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

  /* Parallel asm_operands need special attention because all of the
     inputs are shared across the arms.  Furthermore, unsharing the
     rtl results in recognition failures.  Failure to handle this case
     specially can result in circular rtl.

     Solve this by doing a normal pass across the first entry of the
     parallel, and only processing the SET_DESTs of the subsequent
     entries.  Ug.  */

  if (code == PARALLEL
      && GET_CODE (XVECEXP (x, 0, 0)) == SET
      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
    {
      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);

      /* If this substitution failed, this whole thing fails.  */
      if (GET_CODE (new) == CLOBBER
	  && XEXP (new, 0) == const0_rtx)
	return new;

      SUBST (XVECEXP (x, 0, 0), new);

      /* Remaining arms: substitute only inside non-trivial SET_DESTs
	 (e.g. memory addresses); the shared inputs are left alone.  */
      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
	{
	  rtx dest = SET_DEST (XVECEXP (x, 0, i));

	  if (GET_CODE (dest) != REG
	      && GET_CODE (dest) != CC0
	      && GET_CODE (dest) != PC)
	    {
	      new = subst (dest, from, to, 0, unique_copy);

	      /* If this substitution failed, this whole thing fails.  */
	      if (GET_CODE (new) == CLOBBER
		  && XEXP (new, 0) == const0_rtx)
		return new;

	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
	    }
	}
    }
  else
    {
      len = GET_RTX_LENGTH (code);
      fmt = GET_RTX_FORMAT (code);

      /* We don't need to process a SET_DEST that is a register, CC0,
	 or PC, so set up to skip this common case.  All other cases
	 where we want to suppress replacing something inside a
	 SET_SRC are handled via the IN_DEST operand.
	 (Overriding fmt to "ie" makes the loop below ignore operand 0,
	 the destination, and recurse only into operand 1, the source.)  */
      if (code == SET
	  && (GET_CODE (SET_DEST (x)) == REG
	      || GET_CODE (SET_DEST (x)) == CC0
	      || GET_CODE (SET_DEST (x)) == PC))
	fmt = "ie";

      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
	 constant.  */
      if (fmt[0] == 'e')
	op0_mode = GET_MODE (XEXP (x, 0));

      for (i = 0; i < len; i++)
	{
	  if (fmt[i] == 'E')
	    {
	      /* Vector operand: substitute in each element.  */
	      register int j;
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		{
		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
		    {
		      new = (unique_copy && n_occurrences
			     ? copy_rtx (to) : to);
		      n_occurrences++;
		    }
		  else
		    {
		      new = subst (XVECEXP (x, i, j), from, to, 0,
				   unique_copy);

		      /* If this substitution failed, this whole thing
			 fails.  */
		      if (GET_CODE (new) == CLOBBER
			  && XEXP (new, 0) == const0_rtx)
			return new;
		    }

		  SUBST (XVECEXP (x, i, j), new);
		}
	    }
	  else if (fmt[i] == 'e')
	    {
	      /* If this is a register being set, ignore it.  */
	      new = XEXP (x, i);
	      if (in_dest
		  && (code == SUBREG || code == STRICT_LOW_PART
		      || code == ZERO_EXTRACT)
		  && i == 0
		  && GET_CODE (new) == REG)
		;

	      else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
		{
		  /* In general, don't install a subreg involving two
		     modes not tieable.  It can worsen register
		     allocation, and can even make invalid reload
		     insns, since the reg inside may need to be copied
		     from in the outside mode, and that may be invalid
		     if it is an fp reg copied in integer mode.

		     We allow two exceptions to this: It is valid if
		     it is inside another SUBREG and the mode of that
		     SUBREG and the mode of the inside of TO is
		     tieable and it is valid if X is a SET that copies
		     FROM to CC0.  */

		  if (GET_CODE (to) == SUBREG
		      && ! MODES_TIEABLE_P (GET_MODE (to),
					    GET_MODE (SUBREG_REG (to)))
		      && ! (code == SUBREG
			    && MODES_TIEABLE_P (GET_MODE (x),
						GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
		      )
		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);

#ifdef CLASS_CANNOT_CHANGE_MODE
		  /* Likewise reject substituting a hard register whose
		     class forbids reinterpreting its bits in mode of X.  */
		  if (code == SUBREG
		      && GET_CODE (to) == REG
		      && REGNO (to) < FIRST_PSEUDO_REGISTER
		      && (TEST_HARD_REG_BIT
			  (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
			   REGNO (to)))
		      && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (to),
						     GET_MODE (x)))
		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
#endif

		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
		  n_occurrences++;
		}
	      else
		/* If we are in a SET_DEST, suppress most cases unless we
		   have gone inside a MEM, in which case we want to
		   simplify the address.  We assume here that things that
		   are actually part of the destination have their inner
		   parts in the first expression.  This is true for SUBREG,
		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
		   things aside from REG and MEM that should appear in a
		   SET_DEST.  */
		new = subst (XEXP (x, i), from, to,
			     (((in_dest
				&& (code == SUBREG || code == STRICT_LOW_PART
				    || code == ZERO_EXTRACT))
			       || code == SET)
			      && i == 0), unique_copy);

	      /* If we found that we will have to reject this combination,
		 indicate that by returning the CLOBBER ourselves, rather than
		 an expression containing it.  This will speed things up as
		 well as prevent accidents where two CLOBBERs are considered
		 to be equal, thus producing an incorrect simplification.  */

	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
		return new;

	      SUBST (XEXP (x, i), new);
	    }
	}
    }

  /* Try to simplify X.  If the simplification changed the code, it is likely
     that further simplification will help, so loop, but limit the number
     of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
	 with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
	x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);

      if (GET_CODE (x) == code)
	break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
	 have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}
3502\f
3503/* Simplify X, a piece of RTL. We just operate on the expression at the
3504 outer level; call `subst' to simplify recursively. Return the new
3505 expression.
3506
3507 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3508 will be the iteration even if an expression with a code different from
3509 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 3510
8079805d 3511static rtx
31ec4e5e 3512combine_simplify_rtx (x, op0_mode, last, in_dest)
8079805d
RK
3513 rtx x;
3514 enum machine_mode op0_mode;
3515 int last;
3516 int in_dest;
3517{
3518 enum rtx_code code = GET_CODE (x);
3519 enum machine_mode mode = GET_MODE (x);
3520 rtx temp;
9a915772 3521 rtx reversed;
8079805d 3522 int i;
d0ab8cd3 3523
230d793d
RS
3524 /* If this is a commutative operation, put a constant last and a complex
3525 expression first. We don't need to do this for comparisons here. */
3526 if (GET_RTX_CLASS (code) == 'c'
e5c56fd9 3527 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
230d793d
RS
3528 {
3529 temp = XEXP (x, 0);
3530 SUBST (XEXP (x, 0), XEXP (x, 1));
3531 SUBST (XEXP (x, 1), temp);
3532 }
3533
22609cbf
RK
3534 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3535 sign extension of a PLUS with a constant, reverse the order of the sign
3536 extension and the addition. Note that this not the same as the original
3537 code, but overflow is undefined for signed values. Also note that the
3538 PLUS will have been partially moved "inside" the sign-extension, so that
3539 the first operand of X will really look like:
3540 (ashiftrt (plus (ashift A C4) C5) C4).
3541 We convert this to
3542 (plus (ashiftrt (ashift A C4) C2) C4)
3543 and replace the first operand of X with that expression. Later parts
3544 of this function may simplify the expression further.
3545
3546 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3547 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3548 distributive law to produce (plus (mult (sign_extend X) C1) C3).
3549
3550 We do this to simplify address expressions. */
3551
3552 if ((code == PLUS || code == MINUS || code == MULT)
3553 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3554 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3555 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3556 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3557 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3558 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3559 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3560 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3561 XEXP (XEXP (XEXP (x, 0), 0), 1),
3562 XEXP (XEXP (x, 0), 1))) != 0)
3563 {
3564 rtx new
3565 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3566 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3567 INTVAL (XEXP (XEXP (x, 0), 1)));
3568
3569 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3570 INTVAL (XEXP (XEXP (x, 0), 1)));
3571
3572 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3573 }
3574
663522cb 3575 /* If this is a simple operation applied to an IF_THEN_ELSE, try
d0ab8cd3 3576 applying it to the arms of the IF_THEN_ELSE. This often simplifies
abe6e52f
RK
3577 things. Check for cases where both arms are testing the same
3578 condition.
3579
3580 Don't do anything if all operands are very simple. */
3581
3582 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3583 || GET_RTX_CLASS (code) == '<')
3584 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3585 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3586 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3587 == 'o')))
3588 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3589 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3590 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3591 == 'o')))))
3592 || (GET_RTX_CLASS (code) == '1'
3593 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3594 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3595 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3596 == 'o'))))))
d0ab8cd3 3597 {
d6edb99e 3598 rtx cond, true_rtx, false_rtx;
abe6e52f 3599
d6edb99e 3600 cond = if_then_else_cond (x, &true_rtx, &false_rtx);
0802d516
RK
3601 if (cond != 0
3602 /* If everything is a comparison, what we have is highly unlikely
3603 to be simpler, so don't use it. */
3604 && ! (GET_RTX_CLASS (code) == '<'
d6edb99e
ZW
3605 && (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<'
3606 || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<')))
abe6e52f
RK
3607 {
3608 rtx cop1 = const0_rtx;
3609 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3610
15448afc
RK
3611 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3612 return x;
3613
663522cb 3614 /* Simplify the alternative arms; this may collapse the true and
9210df58 3615 false arms to store-flag values. */
d6edb99e
ZW
3616 true_rtx = subst (true_rtx, pc_rtx, pc_rtx, 0, 0);
3617 false_rtx = subst (false_rtx, pc_rtx, pc_rtx, 0, 0);
9210df58 3618
d6edb99e 3619 /* If true_rtx and false_rtx are not general_operands, an if_then_else
085f1714 3620 is unlikely to be simpler. */
d6edb99e
ZW
3621 if (general_operand (true_rtx, VOIDmode)
3622 && general_operand (false_rtx, VOIDmode))
085f1714
RH
3623 {
3624 /* Restarting if we generate a store-flag expression will cause
3625 us to loop. Just drop through in this case. */
3626
3627 /* If the result values are STORE_FLAG_VALUE and zero, we can
3628 just make the comparison operation. */
d6edb99e 3629 if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
085f1714 3630 x = gen_binary (cond_code, mode, cond, cop1);
d6edb99e 3631 else if (true_rtx == const0_rtx && false_rtx == const_true_rtx)
085f1714
RH
3632 x = gen_binary (reverse_condition (cond_code),
3633 mode, cond, cop1);
3634
3635 /* Likewise, we can make the negate of a comparison operation
3636 if the result values are - STORE_FLAG_VALUE and zero. */
d6edb99e
ZW
3637 else if (GET_CODE (true_rtx) == CONST_INT
3638 && INTVAL (true_rtx) == - STORE_FLAG_VALUE
3639 && false_rtx == const0_rtx)
f1c6ba8b
RK
3640 x = simplify_gen_unary (NEG, mode,
3641 gen_binary (cond_code, mode, cond,
3642 cop1),
3643 mode);
d6edb99e
ZW
3644 else if (GET_CODE (false_rtx) == CONST_INT
3645 && INTVAL (false_rtx) == - STORE_FLAG_VALUE
3646 && true_rtx == const0_rtx)
f1c6ba8b
RK
3647 x = simplify_gen_unary (NEG, mode,
3648 gen_binary (reverse_condition
3649 (cond_code),
3650 mode, cond, cop1),
3651 mode);
085f1714
RH
3652 else
3653 return gen_rtx_IF_THEN_ELSE (mode,
3654 gen_binary (cond_code, VOIDmode,
3655 cond, cop1),
d6edb99e 3656 true_rtx, false_rtx);
5109d49f 3657
085f1714
RH
3658 code = GET_CODE (x);
3659 op0_mode = VOIDmode;
3660 }
abe6e52f 3661 }
d0ab8cd3
RK
3662 }
3663
230d793d
RS
3664 /* Try to fold this expression in case we have constants that weren't
3665 present before. */
3666 temp = 0;
3667 switch (GET_RTX_CLASS (code))
3668 {
3669 case '1':
3670 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3671 break;
3672 case '<':
47b1e19b
JH
3673 {
3674 enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
3675 if (cmp_mode == VOIDmode)
1cac8785
DD
3676 {
3677 cmp_mode = GET_MODE (XEXP (x, 1));
3678 if (cmp_mode == VOIDmode)
3679 cmp_mode = op0_mode;
3680 }
47b1e19b
JH
3681 temp = simplify_relational_operation (code, cmp_mode,
3682 XEXP (x, 0), XEXP (x, 1));
3683 }
77fa0940 3684#ifdef FLOAT_STORE_FLAG_VALUE
12530dbe
RH
3685 if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3686 {
3687 if (temp == const0_rtx)
3688 temp = CONST0_RTX (mode);
3689 else
3690 temp = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE (mode), mode);
3691 }
77fa0940 3692#endif
230d793d
RS
3693 break;
3694 case 'c':
3695 case '2':
3696 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3697 break;
3698 case 'b':
3699 case '3':
3700 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3701 XEXP (x, 1), XEXP (x, 2));
3702 break;
3703 }
3704
3705 if (temp)
4531c1c7
DN
3706 {
3707 x = temp;
3708 code = GET_CODE (temp);
3709 op0_mode = VOIDmode;
3710 mode = GET_MODE (temp);
3711 }
230d793d 3712
230d793d 3713 /* First see if we can apply the inverse distributive law. */
224eeff2
RK
3714 if (code == PLUS || code == MINUS
3715 || code == AND || code == IOR || code == XOR)
230d793d
RS
3716 {
3717 x = apply_distributive_law (x);
3718 code = GET_CODE (x);
6e20204f 3719 op0_mode = VOIDmode;
230d793d
RS
3720 }
3721
3722 /* If CODE is an associative operation not otherwise handled, see if we
3723 can associate some operands. This can win if they are constants or
e0e08ac2 3724 if they are logically related (i.e. (a & b) & a). */
493efd37
TM
3725 if ((code == PLUS || code == MINUS || code == MULT || code == DIV
3726 || code == AND || code == IOR || code == XOR
230d793d 3727 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
493efd37 3728 && ((INTEGRAL_MODE_P (mode) && code != DIV)
4ba5f925 3729 || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
230d793d
RS
3730 {
3731 if (GET_CODE (XEXP (x, 0)) == code)
3732 {
3733 rtx other = XEXP (XEXP (x, 0), 0);
3734 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3735 rtx inner_op1 = XEXP (x, 1);
3736 rtx inner;
663522cb 3737
230d793d
RS
3738 /* Make sure we pass the constant operand if any as the second
3739 one if this is a commutative operation. */
3740 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3741 {
3742 rtx tem = inner_op0;
3743 inner_op0 = inner_op1;
3744 inner_op1 = tem;
3745 }
3746 inner = simplify_binary_operation (code == MINUS ? PLUS
3747 : code == DIV ? MULT
230d793d
RS
3748 : code,
3749 mode, inner_op0, inner_op1);
3750
3751 /* For commutative operations, try the other pair if that one
3752 didn't simplify. */
3753 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3754 {
3755 other = XEXP (XEXP (x, 0), 1);
3756 inner = simplify_binary_operation (code, mode,
3757 XEXP (XEXP (x, 0), 0),
3758 XEXP (x, 1));
3759 }
3760
3761 if (inner)
8079805d 3762 return gen_binary (code, mode, other, inner);
230d793d
RS
3763 }
3764 }
3765
3766 /* A little bit of algebraic simplification here. */
3767 switch (code)
3768 {
3769 case MEM:
3770 /* Ensure that our address has any ASHIFTs converted to MULT in case
3771 address-recognizing predicates are called later. */
3772 temp = make_compound_operation (XEXP (x, 0), MEM);
3773 SUBST (XEXP (x, 0), temp);
3774 break;
3775
3776 case SUBREG:
eea50aa0
JH
3777 if (op0_mode == VOIDmode)
3778 op0_mode = GET_MODE (SUBREG_REG (x));
230d793d 3779
eea50aa0 3780 /* simplify_subreg can't use gen_lowpart_for_combine. */
3c99d5ff 3781 if (CONSTANT_P (SUBREG_REG (x))
e0e08ac2 3782 && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x))
230d793d
RS
3783 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3784
eea50aa0
JH
3785 {
3786 rtx temp;
3787 temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
23190837 3788 SUBREG_BYTE (x));
eea50aa0
JH
3789 if (temp)
3790 return temp;
3791 }
b65c1b5b 3792
87e3e0c1
RK
3793 /* Note that we cannot do any narrowing for non-constants since
3794 we might have been counting on using the fact that some bits were
3795 zero. We now do this in the SET. */
3796
230d793d
RS
3797 break;
3798
3799 case NOT:
3800 /* (not (plus X -1)) can become (neg X). */
3801 if (GET_CODE (XEXP (x, 0)) == PLUS
3802 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
f1c6ba8b 3803 return gen_rtx_NEG (mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3804
3805 /* Similarly, (not (neg X)) is (plus X -1). */
3806 if (GET_CODE (XEXP (x, 0)) == NEG)
f1c6ba8b 3807 return gen_rtx_PLUS (mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
230d793d 3808
663522cb 3809 /* (not (xor X C)) for C constant is (xor X D) with D = ~C. */
d0ab8cd3
RK
3810 if (GET_CODE (XEXP (x, 0)) == XOR
3811 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3812 && (temp = simplify_unary_operation (NOT, mode,
3813 XEXP (XEXP (x, 0), 1),
3814 mode)) != 0)
787745f5 3815 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
663522cb 3816
230d793d
RS
3817 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3818 other than 1, but that is not valid. We could do a similar
3819 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3820 but this doesn't seem common enough to bother with. */
3821 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3822 && XEXP (XEXP (x, 0), 0) == const1_rtx)
f1c6ba8b
RK
3823 return gen_rtx_ROTATE (mode, simplify_gen_unary (NOT, mode,
3824 const1_rtx, mode),
38a448ca 3825 XEXP (XEXP (x, 0), 1));
663522cb 3826
230d793d
RS
3827 if (GET_CODE (XEXP (x, 0)) == SUBREG
3828 && subreg_lowpart_p (XEXP (x, 0))
3829 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3830 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3831 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3832 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3833 {
3834 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3835
38a448ca 3836 x = gen_rtx_ROTATE (inner_mode,
f1c6ba8b
RK
3837 simplify_gen_unary (NOT, inner_mode, const1_rtx,
3838 inner_mode),
38a448ca 3839 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
8079805d 3840 return gen_lowpart_for_combine (mode, x);
230d793d 3841 }
663522cb 3842
0802d516
RK
3843 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3844 reversing the comparison code if valid. */
3845 if (STORE_FLAG_VALUE == -1
3846 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
9a915772
JH
3847 && (reversed = reversed_comparison (x, mode, XEXP (XEXP (x, 0), 0),
3848 XEXP (XEXP (x, 0), 1))))
3849 return reversed;
500c518b
RK
3850
3851 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
0802d516
RK
3852 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3853 perform the above simplification. */
500c518b 3854
0802d516 3855 if (STORE_FLAG_VALUE == -1
500c518b 3856 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
37ac53d9 3857 && XEXP (x, 1) == const1_rtx
500c518b
RK
3858 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3859 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
f1c6ba8b 3860 return gen_rtx_GE (mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
3861
3862 /* Apply De Morgan's laws to reduce number of patterns for machines
23190837
AJ
3863 with negating logical insns (and-not, nand, etc.). If result has
3864 only one NOT, put it first, since that is how the patterns are
3865 coded. */
230d793d
RS
3866
3867 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
23190837 3868 {
663522cb 3869 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
5bd60ce6 3870 enum machine_mode op_mode;
230d793d 3871
5bd60ce6 3872 op_mode = GET_MODE (in1);
f1c6ba8b 3873 in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
230d793d 3874
5bd60ce6
RH
3875 op_mode = GET_MODE (in2);
3876 if (op_mode == VOIDmode)
3877 op_mode = mode;
f1c6ba8b 3878 in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
663522cb 3879
5bd60ce6 3880 if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
663522cb
KH
3881 {
3882 rtx tem = in2;
3883 in2 = in1; in1 = tem;
3884 }
3885
f1c6ba8b
RK
3886 return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3887 mode, in1, in2);
663522cb 3888 }
230d793d
RS
3889 break;
3890
3891 case NEG:
3892 /* (neg (plus X 1)) can become (not X). */
3893 if (GET_CODE (XEXP (x, 0)) == PLUS
3894 && XEXP (XEXP (x, 0), 1) == const1_rtx)
f1c6ba8b 3895 return gen_rtx_NOT (mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3896
3897 /* Similarly, (neg (not X)) is (plus X 1). */
3898 if (GET_CODE (XEXP (x, 0)) == NOT)
8079805d 3899 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
230d793d 3900
230d793d
RS
3901 /* (neg (minus X Y)) can become (minus Y X). */
3902 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3903 && (! FLOAT_MODE_P (mode)
0f41302f 3904 /* x-y != -(y-x) with IEEE floating point. */
7e2a0d8e 3905 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
de6c5979 3906 || flag_unsafe_math_optimizations))
8079805d
RK
3907 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3908 XEXP (XEXP (x, 0), 0));
230d793d 3909
0f41302f 3910 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
d0ab8cd3 3911 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3912 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
8079805d 3913 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
d0ab8cd3 3914
230d793d
RS
3915 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3916 if we can then eliminate the NEG (e.g.,
3917 if the operand is a constant). */
3918
3919 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3920 {
3921 temp = simplify_unary_operation (NEG, mode,
3922 XEXP (XEXP (x, 0), 0), mode);
3923 if (temp)
3924 {
3925 SUBST (XEXP (XEXP (x, 0), 0), temp);
3926 return XEXP (x, 0);
3927 }
3928 }
3929
3930 temp = expand_compound_operation (XEXP (x, 0));
3931
3932 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
23190837 3933 replaced by (lshiftrt X C). This will convert
230d793d
RS
3934 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3935
3936 if (GET_CODE (temp) == ASHIFTRT
3937 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3938 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
8079805d
RK
3939 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3940 INTVAL (XEXP (temp, 1)));
230d793d 3941
951553af 3942 /* If X has only a single bit that might be nonzero, say, bit I, convert
230d793d
RS
3943 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3944 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3945 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3946 or a SUBREG of one since we'd be making the expression more
3947 complex if it was just a register. */
3948
3949 if (GET_CODE (temp) != REG
3950 && ! (GET_CODE (temp) == SUBREG
3951 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3952 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
230d793d
RS
3953 {
3954 rtx temp1 = simplify_shift_const
5f4f0e22
CH
3955 (NULL_RTX, ASHIFTRT, mode,
3956 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
230d793d
RS
3957 GET_MODE_BITSIZE (mode) - 1 - i),
3958 GET_MODE_BITSIZE (mode) - 1 - i);
3959
3960 /* If all we did was surround TEMP with the two shifts, we
3961 haven't improved anything, so don't use it. Otherwise,
3962 we are better off with TEMP1. */
3963 if (GET_CODE (temp1) != ASHIFTRT
3964 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3965 || XEXP (XEXP (temp1, 0), 0) != temp)
8079805d 3966 return temp1;
230d793d
RS
3967 }
3968 break;
3969
2ca9ae17 3970 case TRUNCATE:
e30fb98f
JL
3971 /* We can't handle truncation to a partial integer mode here
3972 because we don't know the real bitsize of the partial
3973 integer mode. */
3974 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3975 break;
3976
80608e27
JL
3977 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3978 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3979 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
2ca9ae17
JW
3980 SUBST (XEXP (x, 0),
3981 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3982 GET_MODE_MASK (mode), NULL_RTX, 0));
0f13a422
ILT
3983
3984 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3985 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3986 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3987 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3988 return XEXP (XEXP (x, 0), 0);
3989
3990 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3991 (OP:SI foo:SI) if OP is NEG or ABS. */
3992 if ((GET_CODE (XEXP (x, 0)) == ABS
3993 || GET_CODE (XEXP (x, 0)) == NEG)
3994 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3995 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3996 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
f1c6ba8b
RK
3997 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
3998 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
0f13a422
ILT
3999
4000 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
4001 (truncate:SI x). */
4002 if (GET_CODE (XEXP (x, 0)) == SUBREG
4003 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
4004 && subreg_lowpart_p (XEXP (x, 0)))
4005 return SUBREG_REG (XEXP (x, 0));
4006
4007 /* If we know that the value is already truncated, we can
14a774a9
RK
4008 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
4009 is nonzero for the corresponding modes. But don't do this
4010 for an (LSHIFTRT (MULT ...)) since this will cause problems
4011 with the umulXi3_highpart patterns. */
6a992214
JL
4012 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4013 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4014 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
14a774a9
RK
4015 >= GET_MODE_BITSIZE (mode) + 1
4016 && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
23190837 4017 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
0f13a422
ILT
4018 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4019
4020 /* A truncate of a comparison can be replaced with a subreg if
4021 STORE_FLAG_VALUE permits. This is like the previous test,
4022 but it works even if the comparison is done in a mode larger
4023 than HOST_BITS_PER_WIDE_INT. */
4024 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4025 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
663522cb 4026 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
0f13a422
ILT
4027 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4028
4029 /* Similarly, a truncate of a register whose value is a
4030 comparison can be replaced with a subreg if STORE_FLAG_VALUE
4031 permits. */
4032 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
663522cb 4033 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
0f13a422
ILT
4034 && (temp = get_last_value (XEXP (x, 0)))
4035 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4036 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4037
2ca9ae17
JW
4038 break;
4039
230d793d
RS
4040 case FLOAT_TRUNCATE:
4041 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
4042 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4043 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
663522cb 4044 return XEXP (XEXP (x, 0), 0);
4635f748
RK
4045
4046 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
4047 (OP:SF foo:SF) if OP is NEG or ABS. */
4048 if ((GET_CODE (XEXP (x, 0)) == ABS
4049 || GET_CODE (XEXP (x, 0)) == NEG)
4050 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4051 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
f1c6ba8b
RK
4052 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4053 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
1d12df72
RK
4054
4055 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4056 is (float_truncate:SF x). */
4057 if (GET_CODE (XEXP (x, 0)) == SUBREG
4058 && subreg_lowpart_p (XEXP (x, 0))
4059 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4060 return SUBREG_REG (XEXP (x, 0));
663522cb 4061 break;
230d793d
RS
4062
4063#ifdef HAVE_cc0
4064 case COMPARE:
4065 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
4066 using cc0, in which case we want to leave it as a COMPARE
4067 so we can distinguish it from a register-register-copy. */
4068 if (XEXP (x, 1) == const0_rtx)
4069 return XEXP (x, 0);
4070
4071 /* In IEEE floating point, x-0 is not the same as x. */
4072 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e 4073 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
de6c5979 4074 || flag_unsafe_math_optimizations)
230d793d
RS
4075 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4076 return XEXP (x, 0);
4077 break;
4078#endif
4079
4080 case CONST:
4081 /* (const (const X)) can become (const X). Do it this way rather than
4082 returning the inner CONST since CONST can be shared with a
4083 REG_EQUAL note. */
4084 if (GET_CODE (XEXP (x, 0)) == CONST)
4085 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4086 break;
4087
4088#ifdef HAVE_lo_sum
4089 case LO_SUM:
4090 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
4091 can add in an offset. find_split_point will split this address up
4092 again if it doesn't match. */
4093 if (GET_CODE (XEXP (x, 0)) == HIGH
4094 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4095 return XEXP (x, 1);
4096 break;
4097#endif
4098
4099 case PLUS:
4100 /* If we have (plus (plus (A const) B)), associate it so that CONST is
4101 outermost. That's because that's the way indexed addresses are
4102 supposed to appear. This code used to check many more cases, but
4103 they are now checked elsewhere. */
4104 if (GET_CODE (XEXP (x, 0)) == PLUS
4105 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4106 return gen_binary (PLUS, mode,
4107 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4108 XEXP (x, 1)),
4109 XEXP (XEXP (x, 0), 1));
4110
4111 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4112 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4113 bit-field and can be replaced by either a sign_extend or a
e6380233
JL
4114 sign_extract. The `and' may be a zero_extend and the two
4115 <c>, -<c> constants may be reversed. */
230d793d
RS
4116 if (GET_CODE (XEXP (x, 0)) == XOR
4117 && GET_CODE (XEXP (x, 1)) == CONST_INT
4118 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
663522cb 4119 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
e6380233
JL
4120 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4121 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5f4f0e22 4122 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
4123 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4124 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4125 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 4126 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
230d793d
RS
4127 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4128 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
770ae6cc 4129 == (unsigned int) i + 1))))
8079805d
RK
4130 return simplify_shift_const
4131 (NULL_RTX, ASHIFTRT, mode,
4132 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4133 XEXP (XEXP (XEXP (x, 0), 0), 0),
4134 GET_MODE_BITSIZE (mode) - (i + 1)),
4135 GET_MODE_BITSIZE (mode) - (i + 1));
230d793d 4136
bc0776c6
RK
4137 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4138 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4139 is 1. This produces better code than the alternative immediately
4140 below. */
4141 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
bc0776c6 4142 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
9a915772
JH
4143 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
4144 && (reversed = reversed_comparison (XEXP (x, 0), mode,
4145 XEXP (XEXP (x, 0), 0),
4146 XEXP (XEXP (x, 0), 1))))
8079805d 4147 return
f1c6ba8b 4148 simplify_gen_unary (NEG, mode, reversed, mode);
bc0776c6
RK
4149
4150 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
230d793d
RS
4151 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4152 the bitsize of the mode - 1. This allows simplification of
4153 "a = (b & 8) == 0;" */
4154 if (XEXP (x, 1) == constm1_rtx
4155 && GET_CODE (XEXP (x, 0)) != REG
4156 && ! (GET_CODE (XEXP (x,0)) == SUBREG
4157 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 4158 && nonzero_bits (XEXP (x, 0), mode) == 1)
8079805d
RK
4159 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4160 simplify_shift_const (NULL_RTX, ASHIFT, mode,
f1c6ba8b 4161 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
8079805d
RK
4162 GET_MODE_BITSIZE (mode) - 1),
4163 GET_MODE_BITSIZE (mode) - 1);
02f4ada4
RK
4164
4165 /* If we are adding two things that have no bits in common, convert
4166 the addition into an IOR. This will often be further simplified,
4167 for example in cases like ((a & 1) + (a & 2)), which can
4168 become a & 3. */
4169
ac49a949 4170 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af
RK
4171 && (nonzero_bits (XEXP (x, 0), mode)
4172 & nonzero_bits (XEXP (x, 1), mode)) == 0)
085f1714
RH
4173 {
4174 /* Try to simplify the expression further. */
4175 rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4176 temp = combine_simplify_rtx (tor, mode, last, in_dest);
4177
4178 /* If we could, great. If not, do not go ahead with the IOR
4179 replacement, since PLUS appears in many special purpose
4180 address arithmetic instructions. */
4181 if (GET_CODE (temp) != CLOBBER && temp != tor)
4182 return temp;
4183 }
230d793d
RS
4184 break;
4185
4186 case MINUS:
0802d516
RK
4187 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4188 by reversing the comparison code if valid. */
4189 if (STORE_FLAG_VALUE == 1
4190 && XEXP (x, 0) == const1_rtx
5109d49f 4191 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
9a915772
JH
4192 && (reversed = reversed_comparison (XEXP (x, 1), mode,
4193 XEXP (XEXP (x, 1), 0),
4194 XEXP (XEXP (x, 1), 1))))
4195 return reversed;
5109d49f 4196
230d793d
RS
4197 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4198 (and <foo> (const_int pow2-1)) */
4199 if (GET_CODE (XEXP (x, 1)) == AND
4200 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
663522cb 4201 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
230d793d 4202 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
8079805d 4203 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
663522cb 4204 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
7bef8680
RK
4205
4206 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4207 integers. */
4208 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
8079805d
RK
4209 return gen_binary (MINUS, mode,
4210 gen_binary (MINUS, mode, XEXP (x, 0),
4211 XEXP (XEXP (x, 1), 0)),
4212 XEXP (XEXP (x, 1), 1));
230d793d
RS
4213 break;
4214
4215 case MULT:
4216 /* If we have (mult (plus A B) C), apply the distributive law and then
4217 the inverse distributive law to see if things simplify. This
4218 occurs mostly in addresses, often when unrolling loops. */
4219
4220 if (GET_CODE (XEXP (x, 0)) == PLUS)
4221 {
4222 x = apply_distributive_law
4223 (gen_binary (PLUS, mode,
4224 gen_binary (MULT, mode,
4225 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4226 gen_binary (MULT, mode,
3749f4ca
BS
4227 XEXP (XEXP (x, 0), 1),
4228 copy_rtx (XEXP (x, 1)))));
230d793d
RS
4229
4230 if (GET_CODE (x) != MULT)
8079805d 4231 return x;
230d793d 4232 }
4ba5f925
JH
4233 /* Try simplify a*(b/c) as (a*b)/c. */
4234 if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
4235 && GET_CODE (XEXP (x, 0)) == DIV)
4236 {
4237 rtx tem = simplify_binary_operation (MULT, mode,
4238 XEXP (XEXP (x, 0), 0),
4239 XEXP (x, 1));
4240 if (tem)
4241 return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4242 }
230d793d
RS
4243 break;
4244
4245 case UDIV:
4246 /* If this is a divide by a power of two, treat it as a shift if
4247 its first operand is a shift. */
4248 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4249 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4250 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4251 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4252 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4253 || GET_CODE (XEXP (x, 0)) == ROTATE
4254 || GET_CODE (XEXP (x, 0)) == ROTATERT))
8079805d 4255 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
230d793d
RS
4256 break;
4257
4258 case EQ: case NE:
4259 case GT: case GTU: case GE: case GEU:
4260 case LT: case LTU: case LE: case LEU:
69bc0a1f 4261 case UNEQ: case LTGT:
23190837
AJ
4262 case UNGT: case UNGE:
4263 case UNLT: case UNLE:
69bc0a1f 4264 case UNORDERED: case ORDERED:
230d793d
RS
4265 /* If the first operand is a condition code, we can't do anything
4266 with it. */
4267 if (GET_CODE (XEXP (x, 0)) == COMPARE
4268 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4269#ifdef HAVE_cc0
4270 && XEXP (x, 0) != cc0_rtx
4271#endif
663522cb 4272 ))
230d793d
RS
4273 {
4274 rtx op0 = XEXP (x, 0);
4275 rtx op1 = XEXP (x, 1);
4276 enum rtx_code new_code;
4277
4278 if (GET_CODE (op0) == COMPARE)
4279 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4280
4281 /* Simplify our comparison, if possible. */
4282 new_code = simplify_comparison (code, &op0, &op1);
4283
230d793d 4284 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 4285 if only the low-order bit is possibly nonzero in X (such as when
5109d49f
RK
4286 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4287 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4288 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4289 (plus X 1).
4290
4291 Remove any ZERO_EXTRACT we made when thinking this was a
4292 comparison. It may now be simpler to use, e.g., an AND. If a
4293 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4294 the call to make_compound_operation in the SET case. */
4295
0802d516
RK
4296 if (STORE_FLAG_VALUE == 1
4297 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
a191f0ee
RH
4298 && op1 == const0_rtx
4299 && mode == GET_MODE (op0)
4300 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4301 return gen_lowpart_for_combine (mode,
4302 expand_compound_operation (op0));
5109d49f 4303
0802d516
RK
4304 else if (STORE_FLAG_VALUE == 1
4305 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4306 && op1 == const0_rtx
a191f0ee 4307 && mode == GET_MODE (op0)
5109d49f
RK
4308 && (num_sign_bit_copies (op0, mode)
4309 == GET_MODE_BITSIZE (mode)))
4310 {
4311 op0 = expand_compound_operation (op0);
f1c6ba8b
RK
4312 return simplify_gen_unary (NEG, mode,
4313 gen_lowpart_for_combine (mode, op0),
4314 mode);
5109d49f
RK
4315 }
4316
0802d516
RK
4317 else if (STORE_FLAG_VALUE == 1
4318 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4319 && op1 == const0_rtx
a191f0ee 4320 && mode == GET_MODE (op0)
5109d49f 4321 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4322 {
4323 op0 = expand_compound_operation (op0);
8079805d
RK
4324 return gen_binary (XOR, mode,
4325 gen_lowpart_for_combine (mode, op0),
4326 const1_rtx);
5109d49f 4327 }
818b11b9 4328
0802d516
RK
4329 else if (STORE_FLAG_VALUE == 1
4330 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4331 && op1 == const0_rtx
a191f0ee 4332 && mode == GET_MODE (op0)
5109d49f
RK
4333 && (num_sign_bit_copies (op0, mode)
4334 == GET_MODE_BITSIZE (mode)))
4335 {
4336 op0 = expand_compound_operation (op0);
8079805d 4337 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9 4338 }
230d793d 4339
5109d49f
RK
4340 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4341 those above. */
0802d516
RK
4342 if (STORE_FLAG_VALUE == -1
4343 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4344 && op1 == const0_rtx
5109d49f
RK
4345 && (num_sign_bit_copies (op0, mode)
4346 == GET_MODE_BITSIZE (mode)))
4347 return gen_lowpart_for_combine (mode,
4348 expand_compound_operation (op0));
4349
0802d516
RK
4350 else if (STORE_FLAG_VALUE == -1
4351 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4352 && op1 == const0_rtx
a191f0ee 4353 && mode == GET_MODE (op0)
5109d49f
RK
4354 && nonzero_bits (op0, mode) == 1)
4355 {
4356 op0 = expand_compound_operation (op0);
f1c6ba8b
RK
4357 return simplify_gen_unary (NEG, mode,
4358 gen_lowpart_for_combine (mode, op0),
4359 mode);
5109d49f
RK
4360 }
4361
0802d516
RK
4362 else if (STORE_FLAG_VALUE == -1
4363 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4364 && op1 == const0_rtx
a191f0ee 4365 && mode == GET_MODE (op0)
5109d49f
RK
4366 && (num_sign_bit_copies (op0, mode)
4367 == GET_MODE_BITSIZE (mode)))
230d793d 4368 {
818b11b9 4369 op0 = expand_compound_operation (op0);
f1c6ba8b
RK
4370 return simplify_gen_unary (NOT, mode,
4371 gen_lowpart_for_combine (mode, op0),
4372 mode);
5109d49f
RK
4373 }
4374
4375 /* If X is 0/1, (eq X 0) is X-1. */
0802d516
RK
4376 else if (STORE_FLAG_VALUE == -1
4377 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4378 && op1 == const0_rtx
a191f0ee 4379 && mode == GET_MODE (op0)
5109d49f
RK
4380 && nonzero_bits (op0, mode) == 1)
4381 {
4382 op0 = expand_compound_operation (op0);
8079805d 4383 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d 4384 }
230d793d
RS
4385
4386 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
951553af
RK
4387 one bit that might be nonzero, we can convert (ne x 0) to
4388 (ashift x c) where C puts the bit in the sign bit. Remove any
4389 AND with STORE_FLAG_VALUE when we are done, since we are only
4390 going to test the sign bit. */
3f508eca 4391 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 4392 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 4393 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
e51712db 4394 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
230d793d
RS
4395 && op1 == const0_rtx
4396 && mode == GET_MODE (op0)
5109d49f 4397 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 4398 {
818b11b9
RK
4399 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4400 expand_compound_operation (op0),
230d793d
RS
4401 GET_MODE_BITSIZE (mode) - 1 - i);
4402 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4403 return XEXP (x, 0);
4404 else
4405 return x;
4406 }
4407
4408 /* If the code changed, return a whole new comparison. */
4409 if (new_code != code)
f1c6ba8b 4410 return gen_rtx_fmt_ee (new_code, mode, op0, op1);
230d793d 4411
663522cb 4412 /* Otherwise, keep this operation, but maybe change its operands.
230d793d
RS
4413 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4414 SUBST (XEXP (x, 0), op0);
4415 SUBST (XEXP (x, 1), op1);
4416 }
4417 break;
663522cb 4418
230d793d 4419 case IF_THEN_ELSE:
8079805d 4420 return simplify_if_then_else (x);
9210df58 4421
8079805d
RK
4422 case ZERO_EXTRACT:
4423 case SIGN_EXTRACT:
4424 case ZERO_EXTEND:
4425 case SIGN_EXTEND:
0f41302f 4426 /* If we are processing SET_DEST, we are done. */
8079805d
RK
4427 if (in_dest)
4428 return x;
d0ab8cd3 4429
8079805d 4430 return expand_compound_operation (x);
d0ab8cd3 4431
8079805d
RK
4432 case SET:
4433 return simplify_set (x);
1a26b032 4434
8079805d
RK
4435 case AND:
4436 case IOR:
4437 case XOR:
4438 return simplify_logical (x, last);
d0ab8cd3 4439
663522cb 4440 case ABS:
8079805d
RK
4441 /* (abs (neg <foo>)) -> (abs <foo>) */
4442 if (GET_CODE (XEXP (x, 0)) == NEG)
4443 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1a26b032 4444
b472527b
JL
4445 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4446 do nothing. */
4447 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4448 break;
f40421ce 4449
8079805d
RK
4450 /* If operand is something known to be positive, ignore the ABS. */
4451 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4452 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4453 <= HOST_BITS_PER_WIDE_INT)
4454 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4455 & ((HOST_WIDE_INT) 1
4456 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4457 == 0)))
4458 return XEXP (x, 0);
1a26b032 4459
8079805d
RK
4460 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4461 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
f1c6ba8b 4462 return gen_rtx_NEG (mode, XEXP (x, 0));
1a26b032 4463
8079805d 4464 break;
1a26b032 4465
8079805d
RK
4466 case FFS:
4467 /* (ffs (*_extend <X>)) = (ffs <X>) */
4468 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4469 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4470 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4471 break;
1a26b032 4472
8079805d
RK
4473 case FLOAT:
4474 /* (float (sign_extend <X>)) = (float <X>). */
4475 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4476 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4477 break;
1a26b032 4478
8079805d
RK
4479 case ASHIFT:
4480 case LSHIFTRT:
4481 case ASHIFTRT:
4482 case ROTATE:
4483 case ROTATERT:
4484 /* If this is a shift by a constant amount, simplify it. */
4485 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
663522cb 4486 return simplify_shift_const (x, code, mode, XEXP (x, 0),
8079805d
RK
4487 INTVAL (XEXP (x, 1)));
4488
4489#ifdef SHIFT_COUNT_TRUNCATED
4490 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4491 SUBST (XEXP (x, 1),
4492 force_to_mode (XEXP (x, 1), GET_MODE (x),
663522cb 4493 ((HOST_WIDE_INT) 1
8079805d
RK
4494 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4495 - 1,
4496 NULL_RTX, 0));
4497#endif
4498
4499 break;
e9a25f70 4500
82be40f7
BS
4501 case VEC_SELECT:
4502 {
4503 rtx op0 = XEXP (x, 0);
4504 rtx op1 = XEXP (x, 1);
4505 int len;
4506
4507 if (GET_CODE (op1) != PARALLEL)
4508 abort ();
4509 len = XVECLEN (op1, 0);
4510 if (len == 1
4511 && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4512 && GET_CODE (op0) == VEC_CONCAT)
4513 {
4514 int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4515
4516 /* Try to find the element in the VEC_CONCAT. */
4517 for (;;)
4518 {
4519 if (GET_MODE (op0) == GET_MODE (x))
4520 return op0;
4521 if (GET_CODE (op0) == VEC_CONCAT)
4522 {
4523 HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4524 if (op0_size < offset)
4525 op0 = XEXP (op0, 0);
4526 else
4527 {
4528 offset -= op0_size;
4529 op0 = XEXP (op0, 1);
4530 }
4531 }
4532 else
4533 break;
4534 }
4535 }
4536 }
4537
4538 break;
23190837 4539
e9a25f70
JL
4540 default:
4541 break;
8079805d
RK
4542 }
4543
4544 return x;
4545}
4546\f
4547/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
5109d49f 4548
8079805d
RK
4549static rtx
4550simplify_if_then_else (x)
4551 rtx x;
4552{
4553 enum machine_mode mode = GET_MODE (x);
4554 rtx cond = XEXP (x, 0);
d6edb99e
ZW
4555 rtx true_rtx = XEXP (x, 1);
4556 rtx false_rtx = XEXP (x, 2);
8079805d
RK
4557 enum rtx_code true_code = GET_CODE (cond);
4558 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4559 rtx temp;
4560 int i;
9a915772
JH
4561 enum rtx_code false_code;
4562 rtx reversed;
8079805d 4563
0f41302f 4564 /* Simplify storing of the truth value. */
d6edb99e 4565 if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
8079805d 4566 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
663522cb 4567
0f41302f 4568 /* Also when the truth value has to be reversed. */
9a915772 4569 if (comparison_p
d6edb99e 4570 && true_rtx == const0_rtx && false_rtx == const_true_rtx
9a915772
JH
4571 && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
4572 XEXP (cond, 1))))
4573 return reversed;
8079805d
RK
4574
4575 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4576 in it is being compared against certain values. Get the true and false
4577 comparisons and see if that says anything about the value of each arm. */
4578
9a915772
JH
4579 if (comparison_p
4580 && ((false_code = combine_reversed_comparison_code (cond))
4581 != UNKNOWN)
8079805d
RK
4582 && GET_CODE (XEXP (cond, 0)) == REG)
4583 {
4584 HOST_WIDE_INT nzb;
4585 rtx from = XEXP (cond, 0);
8079805d
RK
4586 rtx true_val = XEXP (cond, 1);
4587 rtx false_val = true_val;
4588 int swapped = 0;
9210df58 4589
8079805d 4590 /* If FALSE_CODE is EQ, swap the codes and arms. */
5109d49f 4591
8079805d 4592 if (false_code == EQ)
1a26b032 4593 {
8079805d 4594 swapped = 1, true_code = EQ, false_code = NE;
d6edb99e 4595 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
8079805d 4596 }
5109d49f 4597
8079805d
RK
4598 /* If we are comparing against zero and the expression being tested has
4599 only a single bit that might be nonzero, that is its value when it is
4600 not equal to zero. Similarly if it is known to be -1 or 0. */
4601
4602 if (true_code == EQ && true_val == const0_rtx
4603 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4604 false_code = EQ, false_val = GEN_INT (nzb);
4605 else if (true_code == EQ && true_val == const0_rtx
4606 && (num_sign_bit_copies (from, GET_MODE (from))
4607 == GET_MODE_BITSIZE (GET_MODE (from))))
4608 false_code = EQ, false_val = constm1_rtx;
4609
4610 /* Now simplify an arm if we know the value of the register in the
4611 branch and it is used in the arm. Be careful due to the potential
4612 of locally-shared RTL. */
4613
d6edb99e
ZW
4614 if (reg_mentioned_p (from, true_rtx))
4615 true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4616 from, true_val),
8079805d 4617 pc_rtx, pc_rtx, 0, 0);
d6edb99e
ZW
4618 if (reg_mentioned_p (from, false_rtx))
4619 false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
8079805d
RK
4620 from, false_val),
4621 pc_rtx, pc_rtx, 0, 0);
4622
d6edb99e
ZW
4623 SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4624 SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
8079805d 4625
d6edb99e
ZW
4626 true_rtx = XEXP (x, 1);
4627 false_rtx = XEXP (x, 2);
4628 true_code = GET_CODE (cond);
8079805d 4629 }
5109d49f 4630
8079805d
RK
4631 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4632 reversed, do so to avoid needing two sets of patterns for
4633 subtract-and-branch insns. Similarly if we have a constant in the true
4634 arm, the false arm is the same as the first operand of the comparison, or
4635 the false arm is more complicated than the true arm. */
4636
9a915772
JH
4637 if (comparison_p
4638 && combine_reversed_comparison_code (cond) != UNKNOWN
d6edb99e
ZW
4639 && (true_rtx == pc_rtx
4640 || (CONSTANT_P (true_rtx)
4641 && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4642 || true_rtx == const0_rtx
4643 || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o'
4644 && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4645 || (GET_CODE (true_rtx) == SUBREG
4646 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o'
4647 && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4648 || reg_mentioned_p (true_rtx, false_rtx)
4649 || rtx_equal_p (false_rtx, XEXP (cond, 0))))
8079805d 4650 {
9a915772 4651 true_code = reversed_comparison_code (cond, NULL);
8079805d 4652 SUBST (XEXP (x, 0),
9a915772
JH
4653 reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
4654 XEXP (cond, 1)));
5109d49f 4655
d6edb99e
ZW
4656 SUBST (XEXP (x, 1), false_rtx);
4657 SUBST (XEXP (x, 2), true_rtx);
1a26b032 4658
d6edb99e
ZW
4659 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4660 cond = XEXP (x, 0);
bb821298 4661
0f41302f 4662 /* It is possible that the conditional has been simplified out. */
bb821298
RK
4663 true_code = GET_CODE (cond);
4664 comparison_p = GET_RTX_CLASS (true_code) == '<';
8079805d 4665 }
abe6e52f 4666
8079805d 4667 /* If the two arms are identical, we don't need the comparison. */
1a26b032 4668
d6edb99e
ZW
4669 if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4670 return true_rtx;
1a26b032 4671
5be669c7
RK
4672 /* Convert a == b ? b : a to "a". */
4673 if (true_code == EQ && ! side_effects_p (cond)
de6c5979 4674 && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
d6edb99e
ZW
4675 && rtx_equal_p (XEXP (cond, 0), false_rtx)
4676 && rtx_equal_p (XEXP (cond, 1), true_rtx))
4677 return false_rtx;
5be669c7 4678 else if (true_code == NE && ! side_effects_p (cond)
de6c5979 4679 && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
d6edb99e
ZW
4680 && rtx_equal_p (XEXP (cond, 0), true_rtx)
4681 && rtx_equal_p (XEXP (cond, 1), false_rtx))
4682 return true_rtx;
5be669c7 4683
8079805d
RK
4684 /* Look for cases where we have (abs x) or (neg (abs X)). */
4685
4686 if (GET_MODE_CLASS (mode) == MODE_INT
d6edb99e
ZW
4687 && GET_CODE (false_rtx) == NEG
4688 && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
8079805d 4689 && comparison_p
d6edb99e
ZW
4690 && rtx_equal_p (true_rtx, XEXP (cond, 0))
4691 && ! side_effects_p (true_rtx))
8079805d
RK
4692 switch (true_code)
4693 {
4694 case GT:
4695 case GE:
f1c6ba8b 4696 return simplify_gen_unary (ABS, mode, true_rtx, mode);
8079805d
RK
4697 case LT:
4698 case LE:
f1c6ba8b
RK
4699 return
4700 simplify_gen_unary (NEG, mode,
4701 simplify_gen_unary (ABS, mode, true_rtx, mode),
4702 mode);
e9a25f70
JL
4703 default:
4704 break;
8079805d
RK
4705 }
4706
4707 /* Look for MIN or MAX. */
4708
de6c5979 4709 if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
8079805d 4710 && comparison_p
d6edb99e
ZW
4711 && rtx_equal_p (XEXP (cond, 0), true_rtx)
4712 && rtx_equal_p (XEXP (cond, 1), false_rtx)
8079805d
RK
4713 && ! side_effects_p (cond))
4714 switch (true_code)
4715 {
4716 case GE:
4717 case GT:
d6edb99e 4718 return gen_binary (SMAX, mode, true_rtx, false_rtx);
8079805d
RK
4719 case LE:
4720 case LT:
d6edb99e 4721 return gen_binary (SMIN, mode, true_rtx, false_rtx);
8079805d
RK
4722 case GEU:
4723 case GTU:
d6edb99e 4724 return gen_binary (UMAX, mode, true_rtx, false_rtx);
8079805d
RK
4725 case LEU:
4726 case LTU:
d6edb99e 4727 return gen_binary (UMIN, mode, true_rtx, false_rtx);
e9a25f70
JL
4728 default:
4729 break;
8079805d 4730 }
663522cb 4731
8079805d
RK
4732 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4733 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4734 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4735 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4736 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
0802d516 4737 neither 1 or -1, but it isn't worth checking for. */
8079805d 4738
0802d516
RK
4739 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4740 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
8079805d 4741 {
d6edb99e
ZW
4742 rtx t = make_compound_operation (true_rtx, SET);
4743 rtx f = make_compound_operation (false_rtx, SET);
8079805d
RK
4744 rtx cond_op0 = XEXP (cond, 0);
4745 rtx cond_op1 = XEXP (cond, 1);
6a651371 4746 enum rtx_code op = NIL, extend_op = NIL;
8079805d 4747 enum machine_mode m = mode;
6a651371 4748 rtx z = 0, c1 = NULL_RTX;
8079805d 4749
8079805d
RK
4750 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4751 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4752 || GET_CODE (t) == ASHIFT
4753 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4754 && rtx_equal_p (XEXP (t, 0), f))
4755 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4756
4757 /* If an identity-zero op is commutative, check whether there
0f41302f 4758 would be a match if we swapped the operands. */
8079805d
RK
4759 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4760 || GET_CODE (t) == XOR)
4761 && rtx_equal_p (XEXP (t, 1), f))
4762 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4763 else if (GET_CODE (t) == SIGN_EXTEND
4764 && (GET_CODE (XEXP (t, 0)) == PLUS
4765 || GET_CODE (XEXP (t, 0)) == MINUS
4766 || GET_CODE (XEXP (t, 0)) == IOR
4767 || GET_CODE (XEXP (t, 0)) == XOR
4768 || GET_CODE (XEXP (t, 0)) == ASHIFT
4769 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4770 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4771 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4772 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4773 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4774 && (num_sign_bit_copies (f, GET_MODE (f))
4775 > (GET_MODE_BITSIZE (mode)
4776 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4777 {
4778 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4779 extend_op = SIGN_EXTEND;
4780 m = GET_MODE (XEXP (t, 0));
1a26b032 4781 }
8079805d
RK
4782 else if (GET_CODE (t) == SIGN_EXTEND
4783 && (GET_CODE (XEXP (t, 0)) == PLUS
4784 || GET_CODE (XEXP (t, 0)) == IOR
4785 || GET_CODE (XEXP (t, 0)) == XOR)
4786 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4787 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4788 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4789 && (num_sign_bit_copies (f, GET_MODE (f))
4790 > (GET_MODE_BITSIZE (mode)
4791 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4792 {
4793 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4794 extend_op = SIGN_EXTEND;
4795 m = GET_MODE (XEXP (t, 0));
4796 }
4797 else if (GET_CODE (t) == ZERO_EXTEND
4798 && (GET_CODE (XEXP (t, 0)) == PLUS
4799 || GET_CODE (XEXP (t, 0)) == MINUS
4800 || GET_CODE (XEXP (t, 0)) == IOR
4801 || GET_CODE (XEXP (t, 0)) == XOR
4802 || GET_CODE (XEXP (t, 0)) == ASHIFT
4803 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4804 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4805 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4806 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4807 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4808 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4809 && ((nonzero_bits (f, GET_MODE (f))
663522cb 4810 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
8079805d
RK
4811 == 0))
4812 {
4813 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4814 extend_op = ZERO_EXTEND;
4815 m = GET_MODE (XEXP (t, 0));
4816 }
4817 else if (GET_CODE (t) == ZERO_EXTEND
4818 && (GET_CODE (XEXP (t, 0)) == PLUS
4819 || GET_CODE (XEXP (t, 0)) == IOR
4820 || GET_CODE (XEXP (t, 0)) == XOR)
4821 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4822 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4823 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4824 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4825 && ((nonzero_bits (f, GET_MODE (f))
663522cb 4826 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
8079805d
RK
4827 == 0))
4828 {
4829 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4830 extend_op = ZERO_EXTEND;
4831 m = GET_MODE (XEXP (t, 0));
4832 }
663522cb 4833
8079805d
RK
4834 if (z)
4835 {
4836 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4837 pc_rtx, pc_rtx, 0, 0);
4838 temp = gen_binary (MULT, m, temp,
4839 gen_binary (MULT, m, c1, const_true_rtx));
4840 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4841 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4842
4843 if (extend_op != NIL)
f1c6ba8b 4844 temp = simplify_gen_unary (extend_op, mode, temp, m);
8079805d
RK
4845
4846 return temp;
4847 }
4848 }
224eeff2 4849
8079805d
RK
4850 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4851 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4852 negation of a single bit, we can convert this operation to a shift. We
4853 can actually do this more generally, but it doesn't seem worth it. */
4854
4855 if (true_code == NE && XEXP (cond, 1) == const0_rtx
d6edb99e 4856 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
8079805d 4857 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
d6edb99e 4858 && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
8079805d
RK
4859 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4860 == GET_MODE_BITSIZE (mode))
d6edb99e 4861 && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
8079805d
RK
4862 return
4863 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4864 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
230d793d 4865
8079805d
RK
4866 return x;
4867}
4868\f
4869/* Simplify X, a SET expression. Return the new expression. */
230d793d 4870
8079805d
RK
4871static rtx
4872simplify_set (x)
4873 rtx x;
4874{
4875 rtx src = SET_SRC (x);
4876 rtx dest = SET_DEST (x);
4877 enum machine_mode mode
4878 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4879 rtx other_insn;
4880 rtx *cc_use;
4881
4882 /* (set (pc) (return)) gets written as (return). */
4883 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4884 return src;
230d793d 4885
87e3e0c1
RK
4886 /* Now that we know for sure which bits of SRC we are using, see if we can
4887 simplify the expression for the object knowing that we only need the
4888 low-order bits. */
4889
4890 if (GET_MODE_CLASS (mode) == MODE_INT)
c5c76735 4891 {
e8dc6d50 4892 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
c5c76735
JL
4893 SUBST (SET_SRC (x), src);
4894 }
87e3e0c1 4895
8079805d
RK
4896 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4897 the comparison result and try to simplify it unless we already have used
4898 undobuf.other_insn. */
4899 if ((GET_CODE (src) == COMPARE
230d793d 4900#ifdef HAVE_cc0
8079805d 4901 || dest == cc0_rtx
230d793d 4902#endif
8079805d
RK
4903 )
4904 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4905 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4906 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
c0d3ac4d 4907 && rtx_equal_p (XEXP (*cc_use, 0), dest))
8079805d
RK
4908 {
4909 enum rtx_code old_code = GET_CODE (*cc_use);
4910 enum rtx_code new_code;
4911 rtx op0, op1;
4912 int other_changed = 0;
4913 enum machine_mode compare_mode = GET_MODE (dest);
4914
4915 if (GET_CODE (src) == COMPARE)
4916 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4917 else
4918 op0 = src, op1 = const0_rtx;
230d793d 4919
8079805d
RK
4920 /* Simplify our comparison, if possible. */
4921 new_code = simplify_comparison (old_code, &op0, &op1);
230d793d 4922
c141a106 4923#ifdef EXTRA_CC_MODES
8079805d
RK
4924 /* If this machine has CC modes other than CCmode, check to see if we
4925 need to use a different CC mode here. */
4926 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
c141a106 4927#endif /* EXTRA_CC_MODES */
230d793d 4928
c141a106 4929#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
8079805d
RK
4930 /* If the mode changed, we have to change SET_DEST, the mode in the
4931 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4932 a hard register, just build new versions with the proper mode. If it
4933 is a pseudo, we lose unless it is only time we set the pseudo, in
4934 which case we can safely change its mode. */
4935 if (compare_mode != GET_MODE (dest))
4936 {
770ae6cc 4937 unsigned int regno = REGNO (dest);
38a448ca 4938 rtx new_dest = gen_rtx_REG (compare_mode, regno);
8079805d
RK
4939
4940 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 4941 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
230d793d 4942 {
8079805d
RK
4943 if (regno >= FIRST_PSEUDO_REGISTER)
4944 SUBST (regno_reg_rtx[regno], new_dest);
230d793d 4945
8079805d
RK
4946 SUBST (SET_DEST (x), new_dest);
4947 SUBST (XEXP (*cc_use, 0), new_dest);
4948 other_changed = 1;
230d793d 4949
8079805d 4950 dest = new_dest;
230d793d 4951 }
8079805d 4952 }
230d793d
RS
4953#endif
4954
8079805d
RK
4955 /* If the code changed, we have to build a new comparison in
4956 undobuf.other_insn. */
4957 if (new_code != old_code)
4958 {
4959 unsigned HOST_WIDE_INT mask;
4960
f1c6ba8b
RK
4961 SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
4962 dest, const0_rtx));
8079805d
RK
4963
4964 /* If the only change we made was to change an EQ into an NE or
4965 vice versa, OP0 has only one bit that might be nonzero, and OP1
4966 is zero, check if changing the user of the condition code will
4967 produce a valid insn. If it won't, we can keep the original code
4968 in that insn by surrounding our operation with an XOR. */
4969
4970 if (((old_code == NE && new_code == EQ)
4971 || (old_code == EQ && new_code == NE))
4972 && ! other_changed && op1 == const0_rtx
4973 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4974 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
230d793d 4975 {
8079805d 4976 rtx pat = PATTERN (other_insn), note = 0;
230d793d 4977
8e2f6e35 4978 if ((recog_for_combine (&pat, other_insn, &note) < 0
8079805d
RK
4979 && ! check_asm_operands (pat)))
4980 {
4981 PUT_CODE (*cc_use, old_code);
4982 other_insn = 0;
230d793d 4983
8079805d 4984 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
230d793d 4985 }
230d793d
RS
4986 }
4987
8079805d
RK
4988 other_changed = 1;
4989 }
4990
4991 if (other_changed)
4992 undobuf.other_insn = other_insn;
230d793d
RS
4993
4994#ifdef HAVE_cc0
8079805d
RK
4995 /* If we are now comparing against zero, change our source if
4996 needed. If we do not use cc0, we always have a COMPARE. */
4997 if (op1 == const0_rtx && dest == cc0_rtx)
4998 {
4999 SUBST (SET_SRC (x), op0);
5000 src = op0;
5001 }
5002 else
230d793d
RS
5003#endif
5004
8079805d
RK
5005 /* Otherwise, if we didn't previously have a COMPARE in the
5006 correct mode, we need one. */
5007 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5008 {
f1c6ba8b 5009 SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
8079805d 5010 src = SET_SRC (x);
230d793d
RS
5011 }
5012 else
5013 {
8079805d
RK
5014 /* Otherwise, update the COMPARE if needed. */
5015 SUBST (XEXP (src, 0), op0);
5016 SUBST (XEXP (src, 1), op1);
230d793d 5017 }
8079805d
RK
5018 }
5019 else
5020 {
5021 /* Get SET_SRC in a form where we have placed back any
5022 compound expressions. Then do the checks below. */
5023 src = make_compound_operation (src, SET);
5024 SUBST (SET_SRC (x), src);
5025 }
230d793d 5026
8079805d
RK
5027 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5028 and X being a REG or (subreg (reg)), we may be able to convert this to
663522cb 5029 (set (subreg:m2 x) (op)).
df62f951 5030
8079805d
RK
5031 We can always do this if M1 is narrower than M2 because that means that
5032 we only care about the low bits of the result.
df62f951 5033
8079805d 5034 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
9ec36da5 5035 perform a narrower operation than requested since the high-order bits will
8079805d
RK
5036 be undefined. On machine where it is defined, this transformation is safe
5037 as long as M1 and M2 have the same number of words. */
663522cb 5038
8079805d
RK
5039 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5040 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5041 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5042 / UNITS_PER_WORD)
5043 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5044 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
8baf60bb 5045#ifndef WORD_REGISTER_OPERATIONS
8079805d
RK
5046 && (GET_MODE_SIZE (GET_MODE (src))
5047 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
df62f951 5048#endif
02188693 5049#ifdef CLASS_CANNOT_CHANGE_MODE
f507a070
RK
5050 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5051 && (TEST_HARD_REG_BIT
02188693 5052 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
f507a070 5053 REGNO (dest)))
02188693
RH
5054 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (src),
5055 GET_MODE (SUBREG_REG (src))))
663522cb 5056#endif
8079805d
RK
5057 && (GET_CODE (dest) == REG
5058 || (GET_CODE (dest) == SUBREG
5059 && GET_CODE (SUBREG_REG (dest)) == REG)))
5060 {
5061 SUBST (SET_DEST (x),
5062 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5063 dest));
5064 SUBST (SET_SRC (x), SUBREG_REG (src));
5065
5066 src = SET_SRC (x), dest = SET_DEST (x);
5067 }
df62f951 5068
8baf60bb 5069#ifdef LOAD_EXTEND_OP
8079805d
RK
5070 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5071 would require a paradoxical subreg. Replace the subreg with a
0f41302f 5072 zero_extend to avoid the reload that would otherwise be required. */
8079805d
RK
5073
5074 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5075 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
ddef6bc7 5076 && SUBREG_BYTE (src) == 0
8079805d
RK
5077 && (GET_MODE_SIZE (GET_MODE (src))
5078 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5079 && GET_CODE (SUBREG_REG (src)) == MEM)
5080 {
5081 SUBST (SET_SRC (x),
f1c6ba8b 5082 gen_rtx (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
ddef6bc7 5083 GET_MODE (src), SUBREG_REG (src)));
8079805d
RK
5084
5085 src = SET_SRC (x);
5086 }
230d793d
RS
5087#endif
5088
8079805d
RK
5089 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5090 are comparing an item known to be 0 or -1 against 0, use a logical
5091 operation instead. Check for one of the arms being an IOR of the other
5092 arm with some value. We compute three terms to be IOR'ed together. In
5093 practice, at most two will be nonzero. Then we do the IOR's. */
5094
5095 if (GET_CODE (dest) != PC
5096 && GET_CODE (src) == IF_THEN_ELSE
36b8d792 5097 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
8079805d
RK
5098 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5099 && XEXP (XEXP (src, 0), 1) == const0_rtx
6dd49058 5100 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
ea414472
DE
5101#ifdef HAVE_conditional_move
5102 && ! can_conditionally_move_p (GET_MODE (src))
5103#endif
8079805d
RK
5104 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5105 GET_MODE (XEXP (XEXP (src, 0), 0)))
5106 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5107 && ! side_effects_p (src))
5108 {
d6edb99e 5109 rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
8079805d 5110 ? XEXP (src, 1) : XEXP (src, 2));
d6edb99e 5111 rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
8079805d
RK
5112 ? XEXP (src, 2) : XEXP (src, 1));
5113 rtx term1 = const0_rtx, term2, term3;
5114
d6edb99e
ZW
5115 if (GET_CODE (true_rtx) == IOR
5116 && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5117 term1 = false_rtx, true_rtx = XEXP(true_rtx, 1), false_rtx = const0_rtx;
5118 else if (GET_CODE (true_rtx) == IOR
5119 && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5120 term1 = false_rtx, true_rtx = XEXP(true_rtx, 0), false_rtx = const0_rtx;
5121 else if (GET_CODE (false_rtx) == IOR
5122 && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5123 term1 = true_rtx, false_rtx = XEXP(false_rtx, 1), true_rtx = const0_rtx;
5124 else if (GET_CODE (false_rtx) == IOR
5125 && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5126 term1 = true_rtx, false_rtx = XEXP(false_rtx, 0), true_rtx = const0_rtx;
5127
5128 term2 = gen_binary (AND, GET_MODE (src),
5129 XEXP (XEXP (src, 0), 0), true_rtx);
8079805d 5130 term3 = gen_binary (AND, GET_MODE (src),
f1c6ba8b
RK
5131 simplify_gen_unary (NOT, GET_MODE (src),
5132 XEXP (XEXP (src, 0), 0),
5133 GET_MODE (src)),
d6edb99e 5134 false_rtx);
8079805d
RK
5135
5136 SUBST (SET_SRC (x),
5137 gen_binary (IOR, GET_MODE (src),
5138 gen_binary (IOR, GET_MODE (src), term1, term2),
5139 term3));
5140
5141 src = SET_SRC (x);
5142 }
230d793d 5143
246e00f2
RK
5144 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5145 whole thing fail. */
5146 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5147 return src;
5148 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5149 return dest;
5150 else
5151 /* Convert this into a field assignment operation, if possible. */
5152 return make_field_assignment (x);
8079805d
RK
5153}
5154\f
5155/* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
5156 result. LAST is nonzero if this is the last retry. */
5157
5158static rtx
5159simplify_logical (x, last)
5160 rtx x;
5161 int last;
5162{
5163 enum machine_mode mode = GET_MODE (x);
5164 rtx op0 = XEXP (x, 0);
5165 rtx op1 = XEXP (x, 1);
9a915772 5166 rtx reversed;
8079805d
RK
5167
5168 switch (GET_CODE (x))
5169 {
230d793d 5170 case AND:
663522cb 5171 /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
8079805d
RK
5172 insn (and may simplify more). */
5173 if (GET_CODE (op0) == XOR
5174 && rtx_equal_p (XEXP (op0, 0), op1)
5175 && ! side_effects_p (op1))
0c1c8ea6 5176 x = gen_binary (AND, mode,
f1c6ba8b
RK
5177 simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5178 op1);
8079805d
RK
5179
5180 if (GET_CODE (op0) == XOR
5181 && rtx_equal_p (XEXP (op0, 1), op1)
5182 && ! side_effects_p (op1))
0c1c8ea6 5183 x = gen_binary (AND, mode,
f1c6ba8b
RK
5184 simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5185 op1);
8079805d 5186
663522cb 5187 /* Similarly for (~(A ^ B)) & A. */
8079805d
RK
5188 if (GET_CODE (op0) == NOT
5189 && GET_CODE (XEXP (op0, 0)) == XOR
5190 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5191 && ! side_effects_p (op1))
5192 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5193
5194 if (GET_CODE (op0) == NOT
5195 && GET_CODE (XEXP (op0, 0)) == XOR
5196 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5197 && ! side_effects_p (op1))
5198 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5199
2e8f9abf
DM
5200 /* We can call simplify_and_const_int only if we don't lose
5201 any (sign) bits when converting INTVAL (op1) to
5202 "unsigned HOST_WIDE_INT". */
5203 if (GET_CODE (op1) == CONST_INT
5204 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5205 || INTVAL (op1) > 0))
230d793d 5206 {
8079805d 5207 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
230d793d
RS
5208
5209 /* If we have (ior (and (X C1) C2)) and the next restart would be
5210 the last, simplify this by making C1 as small as possible
0f41302f 5211 and then exit. */
8079805d
RK
5212 if (last
5213 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5214 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5215 && GET_CODE (op1) == CONST_INT)
5216 return gen_binary (IOR, mode,
5217 gen_binary (AND, mode, XEXP (op0, 0),
5218 GEN_INT (INTVAL (XEXP (op0, 1))
663522cb 5219 & ~INTVAL (op1))), op1);
230d793d
RS
5220
5221 if (GET_CODE (x) != AND)
8079805d 5222 return x;
0e32506c 5223
663522cb 5224 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
0e32506c
RK
5225 || GET_RTX_CLASS (GET_CODE (x)) == '2')
5226 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
230d793d
RS
5227 }
5228
5229 /* Convert (A | B) & A to A. */
8079805d
RK
5230 if (GET_CODE (op0) == IOR
5231 && (rtx_equal_p (XEXP (op0, 0), op1)
5232 || rtx_equal_p (XEXP (op0, 1), op1))
5233 && ! side_effects_p (XEXP (op0, 0))
5234 && ! side_effects_p (XEXP (op0, 1)))
5235 return op1;
230d793d 5236
d0ab8cd3 5237 /* In the following group of tests (and those in case IOR below),
230d793d
RS
5238 we start with some combination of logical operations and apply
5239 the distributive law followed by the inverse distributive law.
5240 Most of the time, this results in no change. However, if some of
5241 the operands are the same or inverses of each other, simplifications
5242 will result.
5243
5244 For example, (and (ior A B) (not B)) can occur as the result of
5245 expanding a bit field assignment. When we apply the distributive
5246 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
663522cb 5247 which then simplifies to (and (A (not B))).
230d793d 5248
8079805d 5249 If we have (and (ior A B) C), apply the distributive law and then
230d793d
RS
5250 the inverse distributive law to see if things simplify. */
5251
8079805d 5252 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
230d793d
RS
5253 {
5254 x = apply_distributive_law
8079805d
RK
5255 (gen_binary (GET_CODE (op0), mode,
5256 gen_binary (AND, mode, XEXP (op0, 0), op1),
3749f4ca
BS
5257 gen_binary (AND, mode, XEXP (op0, 1),
5258 copy_rtx (op1))));
230d793d 5259 if (GET_CODE (x) != AND)
8079805d 5260 return x;
230d793d
RS
5261 }
5262
8079805d
RK
5263 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5264 return apply_distributive_law
5265 (gen_binary (GET_CODE (op1), mode,
5266 gen_binary (AND, mode, XEXP (op1, 0), op0),
3749f4ca
BS
5267 gen_binary (AND, mode, XEXP (op1, 1),
5268 copy_rtx (op0))));
230d793d
RS
5269
5270 /* Similarly, taking advantage of the fact that
5271 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
5272
8079805d
RK
5273 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5274 return apply_distributive_law
5275 (gen_binary (XOR, mode,
5276 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
3749f4ca
BS
5277 gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5278 XEXP (op1, 1))));
663522cb 5279
8079805d
RK
5280 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5281 return apply_distributive_law
5282 (gen_binary (XOR, mode,
5283 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
3749f4ca 5284 gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
230d793d
RS
5285 break;
5286
5287 case IOR:
951553af 5288 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
8079805d 5289 if (GET_CODE (op1) == CONST_INT
ac49a949 5290 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
663522cb 5291 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
8079805d 5292 return op1;
d0ab8cd3 5293
230d793d 5294 /* Convert (A & B) | A to A. */
8079805d
RK
5295 if (GET_CODE (op0) == AND
5296 && (rtx_equal_p (XEXP (op0, 0), op1)
5297 || rtx_equal_p (XEXP (op0, 1), op1))
5298 && ! side_effects_p (XEXP (op0, 0))
5299 && ! side_effects_p (XEXP (op0, 1)))
5300 return op1;
230d793d
RS
5301
5302 /* If we have (ior (and A B) C), apply the distributive law and then
5303 the inverse distributive law to see if things simplify. */
5304
8079805d 5305 if (GET_CODE (op0) == AND)
230d793d
RS
5306 {
5307 x = apply_distributive_law
5308 (gen_binary (AND, mode,
8079805d 5309 gen_binary (IOR, mode, XEXP (op0, 0), op1),
3749f4ca
BS
5310 gen_binary (IOR, mode, XEXP (op0, 1),
5311 copy_rtx (op1))));
230d793d
RS
5312
5313 if (GET_CODE (x) != IOR)
8079805d 5314 return x;
230d793d
RS
5315 }
5316
8079805d 5317 if (GET_CODE (op1) == AND)
230d793d
RS
5318 {
5319 x = apply_distributive_law
5320 (gen_binary (AND, mode,
8079805d 5321 gen_binary (IOR, mode, XEXP (op1, 0), op0),
3749f4ca
BS
5322 gen_binary (IOR, mode, XEXP (op1, 1),
5323 copy_rtx (op0))));
230d793d
RS
5324
5325 if (GET_CODE (x) != IOR)
8079805d 5326 return x;
230d793d
RS
5327 }
5328
5329 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5330 mode size to (rotate A CX). */
5331
8079805d
RK
5332 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5333 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5334 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5335 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5336 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5337 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
230d793d 5338 == GET_MODE_BITSIZE (mode)))
38a448ca
RH
5339 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5340 (GET_CODE (op0) == ASHIFT
5341 ? XEXP (op0, 1) : XEXP (op1, 1)));
230d793d 5342
71923da7
RK
5343 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5344 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5345 does not affect any of the bits in OP1, it can really be done
5346 as a PLUS and we can associate. We do this by seeing if OP1
5347 can be safely shifted left C bits. */
5348 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5349 && GET_CODE (XEXP (op0, 0)) == PLUS
5350 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5351 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5352 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5353 {
5354 int count = INTVAL (XEXP (op0, 1));
5355 HOST_WIDE_INT mask = INTVAL (op1) << count;
5356
5357 if (mask >> count == INTVAL (op1)
5358 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5359 {
5360 SUBST (XEXP (XEXP (op0, 0), 1),
5361 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5362 return op0;
5363 }
5364 }
230d793d
RS
5365 break;
5366
5367 case XOR:
79e8185c
JH
5368 /* If we are XORing two things that have no bits in common,
5369 convert them into an IOR. This helps to detect rotation encoded
5370 using those methods and possibly other simplifications. */
5371
5372 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5373 && (nonzero_bits (op0, mode)
5374 & nonzero_bits (op1, mode)) == 0)
5375 return (gen_binary (IOR, mode, op0, op1));
5376
230d793d
RS
5377 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5378 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5379 (NOT y). */
5380 {
5381 int num_negated = 0;
230d793d 5382
8079805d
RK
5383 if (GET_CODE (op0) == NOT)
5384 num_negated++, op0 = XEXP (op0, 0);
5385 if (GET_CODE (op1) == NOT)
5386 num_negated++, op1 = XEXP (op1, 0);
230d793d
RS
5387
5388 if (num_negated == 2)
5389 {
8079805d
RK
5390 SUBST (XEXP (x, 0), op0);
5391 SUBST (XEXP (x, 1), op1);
230d793d
RS
5392 }
5393 else if (num_negated == 1)
f1c6ba8b
RK
5394 return
5395 simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1),
5396 mode);
230d793d
RS
5397 }
5398
5399 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5400 correspond to a machine insn or result in further simplifications
5401 if B is a constant. */
5402
8079805d
RK
5403 if (GET_CODE (op0) == AND
5404 && rtx_equal_p (XEXP (op0, 1), op1)
5405 && ! side_effects_p (op1))
0c1c8ea6 5406 return gen_binary (AND, mode,
f1c6ba8b 5407 simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
8079805d 5408 op1);
230d793d 5409
8079805d
RK
5410 else if (GET_CODE (op0) == AND
5411 && rtx_equal_p (XEXP (op0, 0), op1)
5412 && ! side_effects_p (op1))
0c1c8ea6 5413 return gen_binary (AND, mode,
f1c6ba8b 5414 simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
8079805d 5415 op1);
230d793d 5416
230d793d 5417 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
0802d516
RK
5418 comparison if STORE_FLAG_VALUE is 1. */
5419 if (STORE_FLAG_VALUE == 1
5420 && op1 == const1_rtx
8079805d 5421 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
9a915772
JH
5422 && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5423 XEXP (op0, 1))))
5424 return reversed;
500c518b
RK
5425
5426 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5427 is (lt foo (const_int 0)), so we can perform the above
0802d516 5428 simplification if STORE_FLAG_VALUE is 1. */
500c518b 5429
0802d516
RK
5430 if (STORE_FLAG_VALUE == 1
5431 && op1 == const1_rtx
8079805d
RK
5432 && GET_CODE (op0) == LSHIFTRT
5433 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5434 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
f1c6ba8b 5435 return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
230d793d
RS
5436
5437 /* (xor (comparison foo bar) (const_int sign-bit))
5438 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22 5439 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 5440 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
e51712db 5441 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
8079805d
RK
5442 && op1 == const_true_rtx
5443 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
9a915772
JH
5444 && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5445 XEXP (op0, 1))))
5446 return reversed;
0918eca0 5447
230d793d 5448 break;
e9a25f70
JL
5449
5450 default:
5451 abort ();
230d793d
RS
5452 }
5453
5454 return x;
5455}
5456\f
5457/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5458 operations" because they can be replaced with two more basic operations.
5459 ZERO_EXTEND is also considered "compound" because it can be replaced with
5460 an AND operation, which is simpler, though only one operation.
5461
5462 The function expand_compound_operation is called with an rtx expression
663522cb 5463 and will convert it to the appropriate shifts and AND operations,
230d793d
RS
5464 simplifying at each stage.
5465
5466 The function make_compound_operation is called to convert an expression
5467 consisting of shifts and ANDs into the equivalent compound expression.
5468 It is the inverse of this function, loosely speaking. */
5469
5470static rtx
5471expand_compound_operation (x)
5472 rtx x;
5473{
770ae6cc 5474 unsigned HOST_WIDE_INT pos = 0, len;
230d793d 5475 int unsignedp = 0;
770ae6cc 5476 unsigned int modewidth;
230d793d
RS
5477 rtx tem;
5478
5479 switch (GET_CODE (x))
5480 {
5481 case ZERO_EXTEND:
5482 unsignedp = 1;
5483 case SIGN_EXTEND:
75473182
RS
5484 /* We can't necessarily use a const_int for a multiword mode;
5485 it depends on implicitly extending the value.
5486 Since we don't know the right way to extend it,
5487 we can't tell whether the implicit way is right.
5488
5489 Even for a mode that is no wider than a const_int,
5490 we can't win, because we need to sign extend one of its bits through
5491 the rest of it, and we don't know which bit. */
230d793d 5492 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 5493 return x;
230d793d 5494
8079805d
RK
5495 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5496 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5497 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5498 reloaded. If not for that, MEM's would very rarely be safe.
5499
5500 Reject MODEs bigger than a word, because we might not be able
5501 to reference a two-register group starting with an arbitrary register
5502 (and currently gen_lowpart might crash for a SUBREG). */
663522cb 5503
8079805d 5504 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
5505 return x;
5506
5507 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5508 /* If the inner object has VOIDmode (the only way this can happen
5509 is if it is a ASM_OPERANDS), we can't do anything since we don't
5510 know how much masking to do. */
5511 if (len == 0)
5512 return x;
5513
5514 break;
5515
5516 case ZERO_EXTRACT:
5517 unsignedp = 1;
5518 case SIGN_EXTRACT:
5519 /* If the operand is a CLOBBER, just return it. */
5520 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5521 return XEXP (x, 0);
5522
5523 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5524 || GET_CODE (XEXP (x, 2)) != CONST_INT
5525 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5526 return x;
5527
5528 len = INTVAL (XEXP (x, 1));
5529 pos = INTVAL (XEXP (x, 2));
5530
5531 /* If this goes outside the object being extracted, replace the object
5532 with a (use (mem ...)) construct that only combine understands
5533 and is used only for this purpose. */
5534 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
38a448ca 5535 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
230d793d 5536
f76b9db2
ILT
5537 if (BITS_BIG_ENDIAN)
5538 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5539
230d793d
RS
5540 break;
5541
5542 default:
5543 return x;
5544 }
0f808b6f
JH
5545 /* Convert sign extension to zero extension, if we know that the high
5546 bit is not set, as this is easier to optimize. It will be converted
5547 back to cheaper alternative in make_extraction. */
5548 if (GET_CODE (x) == SIGN_EXTEND
5549 && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5550 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
663522cb 5551 & ~(((unsigned HOST_WIDE_INT)
0f808b6f
JH
5552 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5553 >> 1))
5554 == 0)))
5555 {
5556 rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5557 return expand_compound_operation (temp);
5558 }
230d793d 5559
0f13a422
ILT
5560 /* We can optimize some special cases of ZERO_EXTEND. */
5561 if (GET_CODE (x) == ZERO_EXTEND)
5562 {
5563 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5564 know that the last value didn't have any inappropriate bits
5565 set. */
5566 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5567 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5568 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5569 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
663522cb 5570 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5571 return XEXP (XEXP (x, 0), 0);
5572
5573 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5574 if (GET_CODE (XEXP (x, 0)) == SUBREG
5575 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5576 && subreg_lowpart_p (XEXP (x, 0))
5577 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5578 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
663522cb 5579 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5580 return SUBREG_REG (XEXP (x, 0));
5581
5582 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5583 is a comparison and STORE_FLAG_VALUE permits. This is like
5584 the first case, but it works even when GET_MODE (x) is larger
5585 than HOST_WIDE_INT. */
5586 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5587 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5588 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5589 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5590 <= HOST_BITS_PER_WIDE_INT)
23190837 5591 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
663522cb 5592 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5593 return XEXP (XEXP (x, 0), 0);
5594
5595 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5596 if (GET_CODE (XEXP (x, 0)) == SUBREG
5597 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5598 && subreg_lowpart_p (XEXP (x, 0))
5599 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5600 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5601 <= HOST_BITS_PER_WIDE_INT)
5602 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
663522cb 5603 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5604 return SUBREG_REG (XEXP (x, 0));
5605
0f13a422
ILT
5606 }
5607
230d793d
RS
5608 /* If we reach here, we want to return a pair of shifts. The inner
5609 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5610 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5611 logical depending on the value of UNSIGNEDP.
5612
5613 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5614 converted into an AND of a shift.
5615
5616 We must check for the case where the left shift would have a negative
5617 count. This can happen in a case like (x >> 31) & 255 on machines
5618 that can't shift by a constant. On those machines, we would first
663522cb 5619 combine the shift with the AND to produce a variable-position
230d793d
RS
5620 extraction. Then the constant of 31 would be substituted in to produce
5621 a such a position. */
5622
5623 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
770ae6cc 5624 if (modewidth + len >= pos)
5f4f0e22 5625 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 5626 GET_MODE (x),
5f4f0e22
CH
5627 simplify_shift_const (NULL_RTX, ASHIFT,
5628 GET_MODE (x),
230d793d
RS
5629 XEXP (x, 0),
5630 modewidth - pos - len),
5631 modewidth - len);
5632
5f4f0e22
CH
5633 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5634 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5635 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
5636 GET_MODE (x),
5637 XEXP (x, 0), pos),
5f4f0e22 5638 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5639 else
5640 /* Any other cases we can't handle. */
5641 return x;
230d793d
RS
5642
5643 /* If we couldn't do this for some reason, return the original
5644 expression. */
5645 if (GET_CODE (tem) == CLOBBER)
5646 return x;
5647
5648 return tem;
5649}
5650\f
5651/* X is a SET which contains an assignment of one object into
5652 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5653 or certain SUBREGS). If possible, convert it into a series of
5654 logical operations.
5655
5656 We half-heartedly support variable positions, but do not at all
5657 support variable lengths. */
5658
5659static rtx
5660expand_field_assignment (x)
5661 rtx x;
5662{
5663 rtx inner;
0f41302f 5664 rtx pos; /* Always counts from low bit. */
230d793d
RS
5665 int len;
5666 rtx mask;
5667 enum machine_mode compute_mode;
5668
5669 /* Loop until we find something we can't simplify. */
5670 while (1)
5671 {
5672 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5673 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5674 {
ddef6bc7
JJ
5675 int byte_offset = SUBREG_BYTE (XEXP (SET_DEST (x), 0));
5676
230d793d
RS
5677 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5678 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
ddef6bc7 5679 pos = GEN_INT (BITS_PER_WORD * (byte_offset / UNITS_PER_WORD));
230d793d
RS
5680 }
5681 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5682 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5683 {
5684 inner = XEXP (SET_DEST (x), 0);
5685 len = INTVAL (XEXP (SET_DEST (x), 1));
5686 pos = XEXP (SET_DEST (x), 2);
5687
5688 /* If the position is constant and spans the width of INNER,
5689 surround INNER with a USE to indicate this. */
5690 if (GET_CODE (pos) == CONST_INT
5691 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
38a448ca 5692 inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
230d793d 5693
f76b9db2
ILT
5694 if (BITS_BIG_ENDIAN)
5695 {
5696 if (GET_CODE (pos) == CONST_INT)
5697 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5698 - INTVAL (pos));
5699 else if (GET_CODE (pos) == MINUS
5700 && GET_CODE (XEXP (pos, 1)) == CONST_INT
5701 && (INTVAL (XEXP (pos, 1))
5702 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5703 /* If position is ADJUST - X, new position is X. */
5704 pos = XEXP (pos, 0);
5705 else
5706 pos = gen_binary (MINUS, GET_MODE (pos),
5707 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5708 - len),
5709 pos);
5710 }
230d793d
RS
5711 }
5712
5713 /* A SUBREG between two modes that occupy the same numbers of words
5714 can be done by moving the SUBREG to the source. */
5715 else if (GET_CODE (SET_DEST (x)) == SUBREG
b1e9c8a9
AO
5716 /* We need SUBREGs to compute nonzero_bits properly. */
5717 && nonzero_sign_valid
230d793d
RS
5718 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5719 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5720 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5721 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5722 {
38a448ca 5723 x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
c5c76735
JL
5724 gen_lowpart_for_combine
5725 (GET_MODE (SUBREG_REG (SET_DEST (x))),
5726 SET_SRC (x)));
230d793d
RS
5727 continue;
5728 }
5729 else
5730 break;
5731
5732 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5733 inner = SUBREG_REG (inner);
5734
5735 compute_mode = GET_MODE (inner);
5736
861556b4
RH
5737 /* Don't attempt bitwise arithmetic on non-integral modes. */
5738 if (! INTEGRAL_MODE_P (compute_mode))
5739 {
5740 enum machine_mode imode;
5741
5742 /* Something is probably seriously wrong if this matches. */
5743 if (! FLOAT_MODE_P (compute_mode))
5744 break;
5745
5746 /* Try to find an integral mode to pun with. */
5747 imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5748 if (imode == BLKmode)
5749 break;
5750
5751 compute_mode = imode;
5752 inner = gen_lowpart_for_combine (imode, inner);
5753 }
5754
230d793d 5755 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5f4f0e22
CH
5756 if (len < HOST_BITS_PER_WIDE_INT)
5757 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5758 else
5759 break;
5760
5761 /* Now compute the equivalent expression. Make a copy of INNER
5762 for the SET_DEST in case it is a MEM into which we will substitute;
5763 we don't want shared RTL in that case. */
c5c76735
JL
5764 x = gen_rtx_SET
5765 (VOIDmode, copy_rtx (inner),
5766 gen_binary (IOR, compute_mode,
5767 gen_binary (AND, compute_mode,
f1c6ba8b
RK
5768 simplify_gen_unary (NOT, compute_mode,
5769 gen_binary (ASHIFT,
5770 compute_mode,
5771 mask, pos),
5772 compute_mode),
c5c76735
JL
5773 inner),
5774 gen_binary (ASHIFT, compute_mode,
5775 gen_binary (AND, compute_mode,
5776 gen_lowpart_for_combine
5777 (compute_mode, SET_SRC (x)),
5778 mask),
5779 pos)));
230d793d
RS
5780 }
5781
5782 return x;
5783}
5784\f
8999a12e
RK
5785/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5786 it is an RTX that represents a variable starting position; otherwise,
5787 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
5788
5789 INNER may be a USE. This will occur when we started with a bitfield
5790 that went outside the boundary of the object in memory, which is
5791 allowed on most machines. To isolate this case, we produce a USE
5792 whose mode is wide enough and surround the MEM with it. The only
5793 code that understands the USE is this routine. If it is not removed,
5794 it will cause the resulting insn not to match.
5795
663522cb 5796 UNSIGNEDP is non-zero for an unsigned reference and zero for a
230d793d
RS
5797 signed reference.
5798
5799 IN_DEST is non-zero if this is a reference in the destination of a
5800 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5801 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5802 be used.
5803
5804 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5805 ZERO_EXTRACT should be built even for bits starting at bit 0.
5806
76184def
DE
5807 MODE is the desired mode of the result (if IN_DEST == 0).
5808
5809 The result is an RTX for the extraction or NULL_RTX if the target
5810 can't handle it. */
230d793d
RS
5811
5812static rtx
5813make_extraction (mode, inner, pos, pos_rtx, len,
5814 unsignedp, in_dest, in_compare)
5815 enum machine_mode mode;
5816 rtx inner;
770ae6cc 5817 HOST_WIDE_INT pos;
230d793d 5818 rtx pos_rtx;
770ae6cc 5819 unsigned HOST_WIDE_INT len;
230d793d
RS
5820 int unsignedp;
5821 int in_dest, in_compare;
5822{
94b4b17a
RS
5823 /* This mode describes the size of the storage area
5824 to fetch the overall value from. Within that, we
5825 ignore the POS lowest bits, etc. */
230d793d
RS
5826 enum machine_mode is_mode = GET_MODE (inner);
5827 enum machine_mode inner_mode;
d7cd794f
RK
5828 enum machine_mode wanted_inner_mode = byte_mode;
5829 enum machine_mode wanted_inner_reg_mode = word_mode;
230d793d
RS
5830 enum machine_mode pos_mode = word_mode;
5831 enum machine_mode extraction_mode = word_mode;
5832 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5833 int spans_byte = 0;
5834 rtx new = 0;
8999a12e 5835 rtx orig_pos_rtx = pos_rtx;
770ae6cc 5836 HOST_WIDE_INT orig_pos;
230d793d
RS
5837
5838 /* Get some information about INNER and get the innermost object. */
5839 if (GET_CODE (inner) == USE)
94b4b17a 5840 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
230d793d
RS
5841 /* We don't need to adjust the position because we set up the USE
5842 to pretend that it was a full-word object. */
5843 spans_byte = 1, inner = XEXP (inner, 0);
5844 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
94b4b17a
RS
5845 {
5846 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5847 consider just the QI as the memory to extract from.
5848 The subreg adds or removes high bits; its mode is
5849 irrelevant to the meaning of this extraction,
5850 since POS and LEN count from the lsb. */
5851 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5852 is_mode = GET_MODE (SUBREG_REG (inner));
5853 inner = SUBREG_REG (inner);
5854 }
230d793d
RS
5855
5856 inner_mode = GET_MODE (inner);
5857
5858 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
8999a12e 5859 pos = INTVAL (pos_rtx), pos_rtx = 0;
230d793d
RS
5860
5861 /* See if this can be done without an extraction. We never can if the
5862 width of the field is not the same as that of some integer mode. For
5863 registers, we can only avoid the extraction if the position is at the
5864 low-order bit and this is either not in the destination or we have the
5865 appropriate STRICT_LOW_PART operation available.
5866
5867 For MEM, we can avoid an extract if the field starts on an appropriate
5868 boundary and we can change the mode of the memory reference. However,
5869 we cannot directly access the MEM if we have a USE and the underlying
5870 MEM is not TMODE. This combination means that MEM was being used in a
5871 context where bits outside its mode were being referenced; that is only
5872 valid in bit-field insns. */
5873
5874 if (tmode != BLKmode
5875 && ! (spans_byte && inner_mode != tmode)
4d9cfc7b
RK
5876 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5877 && GET_CODE (inner) != MEM
230d793d 5878 && (! in_dest
df62f951
RK
5879 || (GET_CODE (inner) == REG
5880 && (movstrict_optab->handlers[(int) tmode].insn_code
5881 != CODE_FOR_nothing))))
8999a12e 5882 || (GET_CODE (inner) == MEM && pos_rtx == 0
dfbe1b2f
RK
5883 && (pos
5884 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5885 : BITS_PER_UNIT)) == 0
230d793d
RS
5886 /* We can't do this if we are widening INNER_MODE (it
5887 may not be aligned, for one thing). */
5888 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5889 && (inner_mode == tmode
5890 || (! mode_dependent_address_p (XEXP (inner, 0))
5891 && ! MEM_VOLATILE_P (inner))))))
5892 {
230d793d
RS
5893 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5894 field. If the original and current mode are the same, we need not
663522cb 5895 adjust the offset. Otherwise, we do if bytes big endian.
230d793d 5896
4d9cfc7b
RK
5897 If INNER is not a MEM, get a piece consisting of just the field
5898 of interest (in this case POS % BITS_PER_WORD must be 0). */
230d793d
RS
5899
5900 if (GET_CODE (inner) == MEM)
5901 {
f1ec5147
RK
5902 HOST_WIDE_INT offset;
5903
94b4b17a
RS
5904 /* POS counts from lsb, but make OFFSET count in memory order. */
5905 if (BYTES_BIG_ENDIAN)
5906 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5907 else
5908 offset = pos / BITS_PER_UNIT;
230d793d 5909
f1ec5147 5910 new = adjust_address_nv (inner, tmode, offset);
230d793d 5911 }
df62f951 5912 else if (GET_CODE (inner) == REG)
c0d3ac4d
RK
5913 {
5914 /* We can't call gen_lowpart_for_combine here since we always want
5915 a SUBREG and it would sometimes return a new hard register. */
5916 if (tmode != inner_mode)
ddef6bc7 5917 {
f1ec5147 5918 HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
ddef6bc7
JJ
5919
5920 if (WORDS_BIG_ENDIAN
5921 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
5922 final_word = ((GET_MODE_SIZE (inner_mode)
5923 - GET_MODE_SIZE (tmode))
5924 / UNITS_PER_WORD) - final_word;
5925
5926 final_word *= UNITS_PER_WORD;
5927 if (BYTES_BIG_ENDIAN &&
5928 GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
5929 final_word += (GET_MODE_SIZE (inner_mode)
5930 - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
5931
5932 new = gen_rtx_SUBREG (tmode, inner, final_word);
5933 }
23190837
AJ
5934 else
5935 new = inner;
5936 }
230d793d 5937 else
6139ff20
RK
5938 new = force_to_mode (inner, tmode,
5939 len >= HOST_BITS_PER_WIDE_INT
0345195a 5940 ? ~(unsigned HOST_WIDE_INT) 0
729a2125 5941 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 5942 NULL_RTX, 0);
230d793d 5943
663522cb 5944 /* If this extraction is going into the destination of a SET,
230d793d
RS
5945 make a STRICT_LOW_PART unless we made a MEM. */
5946
5947 if (in_dest)
5948 return (GET_CODE (new) == MEM ? new
77fa0940 5949 : (GET_CODE (new) != SUBREG
38a448ca 5950 ? gen_rtx_CLOBBER (tmode, const0_rtx)
f1c6ba8b 5951 : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
230d793d 5952
0f808b6f
JH
5953 if (mode == tmode)
5954 return new;
5955
5956 /* If we know that no extraneous bits are set, and that the high
5957 bit is not set, convert the extraction to the cheaper of
5958 sign and zero extension, that are equivalent in these cases. */
5959 if (flag_expensive_optimizations
5960 && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
5961 && ((nonzero_bits (new, tmode)
663522cb
KH
5962 & ~(((unsigned HOST_WIDE_INT)
5963 GET_MODE_MASK (tmode))
5964 >> 1))
0f808b6f
JH
5965 == 0)))
5966 {
5967 rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
5968 rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
5969
5970 /* Prefer ZERO_EXTENSION, since it gives more information to
5971 backends. */
25ffb1f6 5972 if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
0f808b6f
JH
5973 return temp;
5974 return temp1;
5975 }
5976
230d793d
RS
5977 /* Otherwise, sign- or zero-extend unless we already are in the
5978 proper mode. */
5979
f1c6ba8b
RK
5980 return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5981 mode, new));
230d793d
RS
5982 }
5983
cc471082
RS
5984 /* Unless this is a COMPARE or we have a funny memory reference,
5985 don't do anything with zero-extending field extracts starting at
5986 the low-order bit since they are simple AND operations. */
8999a12e
RK
5987 if (pos_rtx == 0 && pos == 0 && ! in_dest
5988 && ! in_compare && ! spans_byte && unsignedp)
230d793d
RS
5989 return 0;
5990
c5c76735
JL
5991 /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
5992 we would be spanning bytes or if the position is not a constant and the
5993 length is not 1. In all other cases, we would only be going outside
5994 our object in cases when an original shift would have been
e7373556 5995 undefined. */
c5c76735 5996 if (! spans_byte && GET_CODE (inner) == MEM
e7373556
RK
5997 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5998 || (pos_rtx != 0 && len != 1)))
5999 return 0;
6000
d7cd794f 6001 /* Get the mode to use should INNER not be a MEM, the mode for the position,
230d793d
RS
6002 and the mode for the result. */
6003#ifdef HAVE_insv
6004 if (in_dest)
6005 {
0d8e55d8 6006 wanted_inner_reg_mode
a995e389
RH
6007 = insn_data[(int) CODE_FOR_insv].operand[0].mode;
6008 if (wanted_inner_reg_mode == VOIDmode)
6009 wanted_inner_reg_mode = word_mode;
6010
6011 pos_mode = insn_data[(int) CODE_FOR_insv].operand[2].mode;
6012 if (pos_mode == VOIDmode)
6013 pos_mode = word_mode;
6014
6015 extraction_mode = insn_data[(int) CODE_FOR_insv].operand[3].mode;
6016 if (extraction_mode == VOIDmode)
6017 extraction_mode = word_mode;
230d793d
RS
6018 }
6019#endif
6020
6021#ifdef HAVE_extzv
6022 if (! in_dest && unsignedp)
6023 {
0d8e55d8 6024 wanted_inner_reg_mode
a995e389
RH
6025 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
6026 if (wanted_inner_reg_mode == VOIDmode)
6027 wanted_inner_reg_mode = word_mode;
6028
6029 pos_mode = insn_data[(int) CODE_FOR_extzv].operand[3].mode;
6030 if (pos_mode == VOIDmode)
6031 pos_mode = word_mode;
6032
6033 extraction_mode = insn_data[(int) CODE_FOR_extzv].operand[0].mode;
6034 if (extraction_mode == VOIDmode)
6035 extraction_mode = word_mode;
230d793d
RS
6036 }
6037#endif
6038
6039#ifdef HAVE_extv
6040 if (! in_dest && ! unsignedp)
6041 {
0d8e55d8 6042 wanted_inner_reg_mode
a995e389
RH
6043 = insn_data[(int) CODE_FOR_extv].operand[1].mode;
6044 if (wanted_inner_reg_mode == VOIDmode)
6045 wanted_inner_reg_mode = word_mode;
6046
6047 pos_mode = insn_data[(int) CODE_FOR_extv].operand[3].mode;
6048 if (pos_mode == VOIDmode)
6049 pos_mode = word_mode;
6050
6051 extraction_mode = insn_data[(int) CODE_FOR_extv].operand[0].mode;
6052 if (extraction_mode == VOIDmode)
6053 extraction_mode = word_mode;
230d793d
RS
6054 }
6055#endif
6056
6057 /* Never narrow an object, since that might not be safe. */
6058
6059 if (mode != VOIDmode
6060 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6061 extraction_mode = mode;
6062
6063 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6064 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6065 pos_mode = GET_MODE (pos_rtx);
6066
d7cd794f
RK
6067 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6068 if we have to change the mode of memory and cannot, the desired mode is
6069 EXTRACTION_MODE. */
6070 if (GET_CODE (inner) != MEM)
6071 wanted_inner_mode = wanted_inner_reg_mode;
6072 else if (inner_mode != wanted_inner_mode
6073 && (mode_dependent_address_p (XEXP (inner, 0))
6074 || MEM_VOLATILE_P (inner)))
6075 wanted_inner_mode = extraction_mode;
230d793d 6076
6139ff20
RK
6077 orig_pos = pos;
6078
f76b9db2
ILT
6079 if (BITS_BIG_ENDIAN)
6080 {
cf54c2cd
DE
6081 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6082 BITS_BIG_ENDIAN style. If position is constant, compute new
6083 position. Otherwise, build subtraction.
6084 Note that POS is relative to the mode of the original argument.
6085 If it's a MEM we need to recompute POS relative to that.
6086 However, if we're extracting from (or inserting into) a register,
6087 we want to recompute POS relative to wanted_inner_mode. */
6088 int width = (GET_CODE (inner) == MEM
6089 ? GET_MODE_BITSIZE (is_mode)
6090 : GET_MODE_BITSIZE (wanted_inner_mode));
6091
f76b9db2 6092 if (pos_rtx == 0)
cf54c2cd 6093 pos = width - len - pos;
f76b9db2
ILT
6094 else
6095 pos_rtx
f1c6ba8b 6096 = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
cf54c2cd
DE
6097 /* POS may be less than 0 now, but we check for that below.
6098 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
f76b9db2 6099 }
230d793d
RS
6100
6101 /* If INNER has a wider mode, make it smaller. If this is a constant
6102 extract, try to adjust the byte to point to the byte containing
6103 the value. */
d7cd794f
RK
6104 if (wanted_inner_mode != VOIDmode
6105 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
230d793d 6106 && ((GET_CODE (inner) == MEM
d7cd794f 6107 && (inner_mode == wanted_inner_mode
230d793d
RS
6108 || (! mode_dependent_address_p (XEXP (inner, 0))
6109 && ! MEM_VOLATILE_P (inner))))))
6110 {
6111 int offset = 0;
6112
6113 /* The computations below will be correct if the machine is big
6114 endian in both bits and bytes or little endian in bits and bytes.
6115 If it is mixed, we must adjust. */
663522cb 6116
230d793d 6117 /* If bytes are big endian and we had a paradoxical SUBREG, we must
0f41302f 6118 adjust OFFSET to compensate. */
f76b9db2
ILT
6119 if (BYTES_BIG_ENDIAN
6120 && ! spans_byte
230d793d
RS
6121 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6122 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
230d793d
RS
6123
6124 /* If this is a constant position, we can move to the desired byte. */
8999a12e 6125 if (pos_rtx == 0)
230d793d
RS
6126 {
6127 offset += pos / BITS_PER_UNIT;
d7cd794f 6128 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
230d793d
RS
6129 }
6130
f76b9db2
ILT
6131 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6132 && ! spans_byte
d7cd794f 6133 && is_mode != wanted_inner_mode)
c6b3f1f2 6134 offset = (GET_MODE_SIZE (is_mode)
d7cd794f 6135 - GET_MODE_SIZE (wanted_inner_mode) - offset);
c6b3f1f2 6136
d7cd794f 6137 if (offset != 0 || inner_mode != wanted_inner_mode)
f1ec5147 6138 inner = adjust_address_nv (inner, wanted_inner_mode, offset);
230d793d
RS
6139 }
6140
9e74dc41
RK
6141 /* If INNER is not memory, we can always get it into the proper mode. If we
6142 are changing its mode, POS must be a constant and smaller than the size
6143 of the new mode. */
230d793d 6144 else if (GET_CODE (inner) != MEM)
9e74dc41
RK
6145 {
6146 if (GET_MODE (inner) != wanted_inner_mode
6147 && (pos_rtx != 0
6148 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6149 return 0;
6150
6151 inner = force_to_mode (inner, wanted_inner_mode,
6152 pos_rtx
6153 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
0345195a 6154 ? ~(unsigned HOST_WIDE_INT) 0
729a2125
RK
6155 : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6156 << orig_pos),
9e74dc41
RK
6157 NULL_RTX, 0);
6158 }
230d793d
RS
6159
6160 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
6161 have to zero extend. Otherwise, we can just use a SUBREG. */
8999a12e 6162 if (pos_rtx != 0
230d793d 6163 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
0f808b6f 6164 {
f1c6ba8b 6165 rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
0f808b6f
JH
6166
6167 /* If we know that no extraneous bits are set, and that the high
6168 bit is not set, convert extraction to cheaper one - eighter
6169 SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
6170 cases. */
6171 if (flag_expensive_optimizations
6172 && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6173 && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
663522cb
KH
6174 & ~(((unsigned HOST_WIDE_INT)
6175 GET_MODE_MASK (GET_MODE (pos_rtx)))
6176 >> 1))
0f808b6f
JH
6177 == 0)))
6178 {
6179 rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6180
25ffb1f6 6181 /* Prefer ZERO_EXTENSION, since it gives more information to
0f808b6f
JH
6182 backends. */
6183 if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6184 temp = temp1;
6185 }
6186 pos_rtx = temp;
6187 }
8999a12e 6188 else if (pos_rtx != 0
230d793d
RS
6189 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6190 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
6191
8999a12e
RK
6192 /* Make POS_RTX unless we already have it and it is correct. If we don't
6193 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
0f41302f 6194 be a CONST_INT. */
8999a12e
RK
6195 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6196 pos_rtx = orig_pos_rtx;
6197
6198 else if (pos_rtx == 0)
5f4f0e22 6199 pos_rtx = GEN_INT (pos);
230d793d
RS
6200
6201 /* Make the required operation. See if we can use existing rtx. */
f1c6ba8b 6202 new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5f4f0e22 6203 extraction_mode, inner, GEN_INT (len), pos_rtx);
230d793d
RS
6204 if (! in_dest)
6205 new = gen_lowpart_for_combine (mode, new);
6206
6207 return new;
6208}
6209\f
71923da7
RK
6210/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6211 with any other operations in X. Return X without that shift if so. */
6212
6213static rtx
6214extract_left_shift (x, count)
6215 rtx x;
6216 int count;
6217{
6218 enum rtx_code code = GET_CODE (x);
6219 enum machine_mode mode = GET_MODE (x);
6220 rtx tem;
6221
6222 switch (code)
6223 {
6224 case ASHIFT:
6225 /* This is the shift itself. If it is wide enough, we will return
6226 either the value being shifted if the shift count is equal to
6227 COUNT or a shift for the difference. */
6228 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6229 && INTVAL (XEXP (x, 1)) >= count)
6230 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6231 INTVAL (XEXP (x, 1)) - count);
6232 break;
6233
6234 case NEG: case NOT:
6235 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
f1c6ba8b 6236 return simplify_gen_unary (code, mode, tem, mode);
71923da7
RK
6237
6238 break;
6239
6240 case PLUS: case IOR: case XOR: case AND:
6241 /* If we can safely shift this constant and we find the inner shift,
6242 make a new operation. */
6243 if (GET_CODE (XEXP (x,1)) == CONST_INT
b729186a 6244 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
71923da7 6245 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
663522cb 6246 return gen_binary (code, mode, tem,
71923da7
RK
6247 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6248
6249 break;
663522cb 6250
e9a25f70
JL
6251 default:
6252 break;
71923da7
RK
6253 }
6254
6255 return 0;
6256}
6257\f
230d793d
RS
6258/* Look at the expression rooted at X. Look for expressions
6259 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6260 Form these expressions.
6261
6262 Return the new rtx, usually just X.
6263
6264 Also, for machines like the Vax that don't have logical shift insns,
6265 try to convert logical to arithmetic shift operations in cases where
6266 they are equivalent. This undoes the canonicalizations to logical
6267 shifts done elsewhere.
6268
6269 We try, as much as possible, to re-use rtl expressions to save memory.
6270
6271 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
6272 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
6273 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
6274 or a COMPARE against zero, it is COMPARE. */
6275
6276static rtx
6277make_compound_operation (x, in_code)
6278 rtx x;
6279 enum rtx_code in_code;
6280{
6281 enum rtx_code code = GET_CODE (x);
6282 enum machine_mode mode = GET_MODE (x);
6283 int mode_width = GET_MODE_BITSIZE (mode);
71923da7 6284 rtx rhs, lhs;
230d793d 6285 enum rtx_code next_code;
f24ad0e4 6286 int i;
230d793d 6287 rtx new = 0;
280f58ba 6288 rtx tem;
6f7d635c 6289 const char *fmt;
230d793d
RS
6290
6291 /* Select the code to be used in recursive calls. Once we are inside an
6292 address, we stay there. If we have a comparison, set to COMPARE,
6293 but once inside, go back to our default of SET. */
6294
42495ca0 6295 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
230d793d
RS
6296 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
6297 && XEXP (x, 1) == const0_rtx) ? COMPARE
6298 : in_code == COMPARE ? SET : in_code);
6299
6300 /* Process depending on the code of this operation. If NEW is set
6301 non-zero, it will be returned. */
6302
6303 switch (code)
6304 {
6305 case ASHIFT:
230d793d
RS
6306 /* Convert shifts by constants into multiplications if inside
6307 an address. */
6308 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 6309 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 6310 && INTVAL (XEXP (x, 1)) >= 0)
280f58ba
RK
6311 {
6312 new = make_compound_operation (XEXP (x, 0), next_code);
f1c6ba8b
RK
6313 new = gen_rtx_MULT (mode, new,
6314 GEN_INT ((HOST_WIDE_INT) 1
6315 << INTVAL (XEXP (x, 1))));
280f58ba 6316 }
230d793d
RS
6317 break;
6318
6319 case AND:
6320 /* If the second operand is not a constant, we can't do anything
6321 with it. */
6322 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6323 break;
6324
6325 /* If the constant is a power of two minus one and the first operand
6326 is a logical right shift, make an extraction. */
6327 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6328 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
6329 {
6330 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6331 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6332 0, in_code == COMPARE);
6333 }
dfbe1b2f 6334
230d793d
RS
6335 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
6336 else if (GET_CODE (XEXP (x, 0)) == SUBREG
6337 && subreg_lowpart_p (XEXP (x, 0))
6338 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6339 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
6340 {
6341 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6342 next_code);
2f99f437 6343 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
280f58ba
RK
6344 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6345 0, in_code == COMPARE);
6346 }
45620ed4 6347 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
c2f9f64e
JW
6348 else if ((GET_CODE (XEXP (x, 0)) == XOR
6349 || GET_CODE (XEXP (x, 0)) == IOR)
6350 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6351 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6352 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6353 {
6354 /* Apply the distributive law, and then try to make extractions. */
f1c6ba8b
RK
6355 new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6356 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6357 XEXP (x, 1)),
6358 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6359 XEXP (x, 1)));
c2f9f64e
JW
6360 new = make_compound_operation (new, in_code);
6361 }
a7c99304
RK
6362
6363 /* If we are have (and (rotate X C) M) and C is larger than the number
6364 of bits in M, this is an extraction. */
6365
6366 else if (GET_CODE (XEXP (x, 0)) == ROTATE
6367 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6368 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6369 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
6370 {
6371 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6372 new = make_extraction (mode, new,
6373 (GET_MODE_BITSIZE (mode)
6374 - INTVAL (XEXP (XEXP (x, 0), 1))),
6375 NULL_RTX, i, 1, 0, in_code == COMPARE);
6376 }
a7c99304
RK
6377
6378 /* On machines without logical shifts, if the operand of the AND is
230d793d
RS
6379 a logical shift and our mask turns off all the propagated sign
6380 bits, we can replace the logical shift with an arithmetic shift. */
d0ab8cd3
RK
6381 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6382 && (lshr_optab->handlers[(int) mode].insn_code
6383 == CODE_FOR_nothing)
230d793d
RS
6384 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
6385 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6386 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5f4f0e22
CH
6387 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6388 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 6389 {
5f4f0e22 6390 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
6391
6392 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6393 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6394 SUBST (XEXP (x, 0),
f1c6ba8b
RK
6395 gen_rtx_ASHIFTRT (mode,
6396 make_compound_operation
6397 (XEXP (XEXP (x, 0), 0), next_code),
6398 XEXP (XEXP (x, 0), 1)));
230d793d
RS
6399 }
6400
6401 /* If the constant is one less than a power of two, this might be
6402 representable by an extraction even if no shift is present.
6403 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6404 we are in a COMPARE. */
6405 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
6406 new = make_extraction (mode,
6407 make_compound_operation (XEXP (x, 0),
6408 next_code),
6409 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
230d793d
RS
6410
6411 /* If we are in a comparison and this is an AND with a power of two,
6412 convert this into the appropriate bit extract. */
6413 else if (in_code == COMPARE
6414 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
280f58ba
RK
6415 new = make_extraction (mode,
6416 make_compound_operation (XEXP (x, 0),
6417 next_code),
6418 i, NULL_RTX, 1, 1, 0, 1);
230d793d
RS
6419
6420 break;
6421
6422 case LSHIFTRT:
6423 /* If the sign bit is known to be zero, replace this with an
6424 arithmetic shift. */
d0ab8cd3
RK
6425 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
6426 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5f4f0e22 6427 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 6428 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
230d793d 6429 {
f1c6ba8b
RK
6430 new = gen_rtx_ASHIFTRT (mode,
6431 make_compound_operation (XEXP (x, 0),
6432 next_code),
6433 XEXP (x, 1));
230d793d
RS
6434 break;
6435 }
6436
0f41302f 6437 /* ... fall through ... */
230d793d
RS
6438
6439 case ASHIFTRT:
71923da7
RK
6440 lhs = XEXP (x, 0);
6441 rhs = XEXP (x, 1);
6442
230d793d
RS
6443 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6444 this is a SIGN_EXTRACT. */
71923da7
RK
6445 if (GET_CODE (rhs) == CONST_INT
6446 && GET_CODE (lhs) == ASHIFT
6447 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6448 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
280f58ba 6449 {
71923da7 6450 new = make_compound_operation (XEXP (lhs, 0), next_code);
280f58ba 6451 new = make_extraction (mode, new,
71923da7
RK
6452 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6453 NULL_RTX, mode_width - INTVAL (rhs),
d0ab8cd3 6454 code == LSHIFTRT, 0, in_code == COMPARE);
8231ad94 6455 break;
d0ab8cd3
RK
6456 }
6457
71923da7
RK
6458 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6459 If so, try to merge the shifts into a SIGN_EXTEND. We could
6460 also do this for some cases of SIGN_EXTRACT, but it doesn't
6461 seem worth the effort; the case checked for occurs on Alpha. */
663522cb 6462
71923da7
RK
6463 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6464 && ! (GET_CODE (lhs) == SUBREG
6465 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6466 && GET_CODE (rhs) == CONST_INT
6467 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6468 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6469 new = make_extraction (mode, make_compound_operation (new, next_code),
6470 0, NULL_RTX, mode_width - INTVAL (rhs),
6471 code == LSHIFTRT, 0, in_code == COMPARE);
663522cb 6472
230d793d 6473 break;
280f58ba
RK
6474
6475 case SUBREG:
6476 /* Call ourselves recursively on the inner expression. If we are
6477 narrowing the object and it has a different RTL code from
6478 what it originally did, do this SUBREG as a force_to_mode. */
6479
0a5cbff6 6480 tem = make_compound_operation (SUBREG_REG (x), in_code);
280f58ba
RK
6481 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6482 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6483 && subreg_lowpart_p (x))
0a5cbff6 6484 {
e8dc6d50
JH
6485 rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6486 NULL_RTX, 0);
0a5cbff6
RK
6487
6488 /* If we have something other than a SUBREG, we might have
6489 done an expansion, so rerun outselves. */
6490 if (GET_CODE (newer) != SUBREG)
6491 newer = make_compound_operation (newer, in_code);
6492
6493 return newer;
6494 }
6f28d3e9
RH
6495
6496 /* If this is a paradoxical subreg, and the new code is a sign or
6497 zero extension, omit the subreg and widen the extension. If it
6498 is a regular subreg, we can still get rid of the subreg by not
6499 widening so much, or in fact removing the extension entirely. */
6500 if ((GET_CODE (tem) == SIGN_EXTEND
6501 || GET_CODE (tem) == ZERO_EXTEND)
6502 && subreg_lowpart_p (x))
6503 {
6504 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6505 || (GET_MODE_SIZE (mode) >
6506 GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
f1c6ba8b 6507 tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
6f28d3e9
RH
6508 else
6509 tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6510 return tem;
6511 }
e9a25f70 6512 break;
663522cb 6513
e9a25f70
JL
6514 default:
6515 break;
230d793d
RS
6516 }
6517
6518 if (new)
6519 {
df62f951 6520 x = gen_lowpart_for_combine (mode, new);
230d793d
RS
6521 code = GET_CODE (x);
6522 }
6523
6524 /* Now recursively process each operand of this operation. */
6525 fmt = GET_RTX_FORMAT (code);
6526 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6527 if (fmt[i] == 'e')
6528 {
6529 new = make_compound_operation (XEXP (x, i), next_code);
6530 SUBST (XEXP (x, i), new);
6531 }
6532
6533 return x;
6534}
6535\f
6536/* Given M see if it is a value that would select a field of bits
663522cb
KH
6537 within an item, but not the entire word. Return -1 if not.
6538 Otherwise, return the starting position of the field, where 0 is the
6539 low-order bit.
230d793d
RS
6540
6541 *PLEN is set to the length of the field. */
6542
6543static int
6544get_pos_from_mask (m, plen)
5f4f0e22 6545 unsigned HOST_WIDE_INT m;
770ae6cc 6546 unsigned HOST_WIDE_INT *plen;
230d793d
RS
6547{
6548 /* Get the bit number of the first 1 bit from the right, -1 if none. */
663522cb 6549 int pos = exact_log2 (m & -m);
d3bc8938 6550 int len;
230d793d
RS
6551
6552 if (pos < 0)
6553 return -1;
6554
6555 /* Now shift off the low-order zero bits and see if we have a power of
6556 two minus 1. */
d3bc8938 6557 len = exact_log2 ((m >> pos) + 1);
230d793d 6558
d3bc8938 6559 if (len <= 0)
230d793d
RS
6560 return -1;
6561
d3bc8938 6562 *plen = len;
230d793d
RS
6563 return pos;
6564}
6565\f
6139ff20
RK
6566/* See if X can be simplified knowing that we will only refer to it in
6567 MODE and will only refer to those bits that are nonzero in MASK.
6568 If other bits are being computed or if masking operations are done
6569 that select a superset of the bits in MASK, they can sometimes be
6570 ignored.
6571
6572 Return a possibly simplified expression, but always convert X to
6573 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
dfbe1b2f 6574
663522cb 6575 Also, if REG is non-zero and X is a register equal in value to REG,
e3d616e3
RK
6576 replace X with REG.
6577
6578 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6579 are all off in X. This is used when X will be complemented, by either
180b8e4b 6580 NOT, NEG, or XOR. */
dfbe1b2f
RK
6581
6582static rtx
e3d616e3 6583force_to_mode (x, mode, mask, reg, just_select)
dfbe1b2f
RK
6584 rtx x;
6585 enum machine_mode mode;
6139ff20 6586 unsigned HOST_WIDE_INT mask;
dfbe1b2f 6587 rtx reg;
e3d616e3 6588 int just_select;
dfbe1b2f
RK
6589{
6590 enum rtx_code code = GET_CODE (x);
180b8e4b 6591 int next_select = just_select || code == XOR || code == NOT || code == NEG;
ef026f91
RS
6592 enum machine_mode op_mode;
6593 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6139ff20
RK
6594 rtx op0, op1, temp;
6595
132d2040
RK
6596 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6597 code below will do the wrong thing since the mode of such an
663522cb 6598 expression is VOIDmode.
be3d27d6
CI
6599
6600 Also do nothing if X is a CLOBBER; this can happen if X was
6601 the return value from a call to gen_lowpart_for_combine. */
6602 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
246e00f2
RK
6603 return x;
6604
6139ff20
RK
6605 /* We want to perform the operation is its present mode unless we know
6606 that the operation is valid in MODE, in which case we do the operation
6607 in MODE. */
1c75dfa4
RK
6608 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6609 && code_to_optab[(int) code] != 0
ef026f91
RS
6610 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
6611 != CODE_FOR_nothing))
6612 ? mode : GET_MODE (x));
e3d616e3 6613
aa988991
RS
6614 /* It is not valid to do a right-shift in a narrower mode
6615 than the one it came in with. */
6616 if ((code == LSHIFTRT || code == ASHIFTRT)
6617 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6618 op_mode = GET_MODE (x);
ef026f91
RS
6619
6620 /* Truncate MASK to fit OP_MODE. */
6621 if (op_mode)
6622 mask &= GET_MODE_MASK (op_mode);
6139ff20
RK
6623
6624 /* When we have an arithmetic operation, or a shift whose count we
6625 do not know, we need to assume that all bit the up to the highest-order
6626 bit in MASK will be needed. This is how we form such a mask. */
ef026f91
RS
6627 if (op_mode)
6628 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6629 ? GET_MODE_MASK (op_mode)
729a2125
RK
6630 : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6631 - 1));
ef026f91 6632 else
663522cb 6633 fuller_mask = ~(HOST_WIDE_INT) 0;
ef026f91
RS
6634
6635 /* Determine what bits of X are guaranteed to be (non)zero. */
6636 nonzero = nonzero_bits (x, mode);
6139ff20
RK
6637
6638 /* If none of the bits in X are needed, return a zero. */
e3d616e3 6639 if (! just_select && (nonzero & mask) == 0)
6139ff20 6640 return const0_rtx;
dfbe1b2f 6641
6139ff20
RK
6642 /* If X is a CONST_INT, return a new one. Do this here since the
6643 test below will fail. */
6644 if (GET_CODE (x) == CONST_INT)
ceb7983c
RK
6645 {
6646 HOST_WIDE_INT cval = INTVAL (x) & mask;
6647 int width = GET_MODE_BITSIZE (mode);
6648
6649 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6650 number, sign extend it. */
6651 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6652 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6653 cval |= (HOST_WIDE_INT) -1 << width;
663522cb 6654
ceb7983c
RK
6655 return GEN_INT (cval);
6656 }
dfbe1b2f 6657
180b8e4b
RK
6658 /* If X is narrower than MODE and we want all the bits in X's mode, just
6659 get X in the proper mode. */
6660 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
663522cb 6661 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
dfbe1b2f
RK
6662 return gen_lowpart_for_combine (mode, x);
6663
71923da7
RK
6664 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6665 MASK are already known to be zero in X, we need not do anything. */
663522cb 6666 if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
6139ff20
RK
6667 return x;
6668
dfbe1b2f
RK
6669 switch (code)
6670 {
6139ff20
RK
6671 case CLOBBER:
6672 /* If X is a (clobber (const_int)), return it since we know we are
0f41302f 6673 generating something that won't match. */
6139ff20
RK
6674 return x;
6675
6139ff20
RK
6676 case USE:
6677 /* X is a (use (mem ..)) that was made from a bit-field extraction that
6678 spanned the boundary of the MEM. If we are now masking so it is
6679 within that boundary, we don't need the USE any more. */
f76b9db2 6680 if (! BITS_BIG_ENDIAN
663522cb 6681 && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
e3d616e3 6682 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
f76b9db2 6683 break;
6139ff20 6684
dfbe1b2f
RK
6685 case SIGN_EXTEND:
6686 case ZERO_EXTEND:
6687 case ZERO_EXTRACT:
6688 case SIGN_EXTRACT:
6689 x = expand_compound_operation (x);
6690 if (GET_CODE (x) != code)
e3d616e3 6691 return force_to_mode (x, mode, mask, reg, next_select);
dfbe1b2f
RK
6692 break;
6693
6694 case REG:
6695 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6696 || rtx_equal_p (reg, get_last_value (x))))
6697 x = reg;
6698 break;
6699
dfbe1b2f 6700 case SUBREG:
6139ff20 6701 if (subreg_lowpart_p (x)
180b8e4b
RK
6702 /* We can ignore the effect of this SUBREG if it narrows the mode or
6703 if the constant masks to zero all the bits the mode doesn't
6704 have. */
6139ff20
RK
6705 && ((GET_MODE_SIZE (GET_MODE (x))
6706 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6139ff20
RK
6707 || (0 == (mask
6708 & GET_MODE_MASK (GET_MODE (x))
663522cb 6709 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
e3d616e3 6710 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
dfbe1b2f
RK
6711 break;
6712
6713 case AND:
6139ff20
RK
6714 /* If this is an AND with a constant, convert it into an AND
6715 whose constant is the AND of that constant with MASK. If it
6716 remains an AND of MASK, delete it since it is redundant. */
dfbe1b2f 6717
2ca9ae17 6718 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
dfbe1b2f 6719 {
6139ff20
RK
6720 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6721 mask & INTVAL (XEXP (x, 1)));
dfbe1b2f
RK
6722
6723 /* If X is still an AND, see if it is an AND with a mask that
71923da7
RK
6724 is just some low-order bits. If so, and it is MASK, we don't
6725 need it. */
dfbe1b2f
RK
6726
6727 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
e51712db 6728 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask)
dfbe1b2f 6729 x = XEXP (x, 0);
d0ab8cd3 6730
71923da7
RK
6731 /* If it remains an AND, try making another AND with the bits
6732 in the mode mask that aren't in MASK turned on. If the
6733 constant in the AND is wide enough, this might make a
6734 cheaper constant. */
6735
6736 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
2ca9ae17
JW
6737 && GET_MODE_MASK (GET_MODE (x)) != mask
6738 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
71923da7
RK
6739 {
6740 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
663522cb 6741 | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
71923da7
RK
6742 int width = GET_MODE_BITSIZE (GET_MODE (x));
6743 rtx y;
6744
6745 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6746 number, sign extend it. */
6747 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6748 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6749 cval |= (HOST_WIDE_INT) -1 << width;
6750
6751 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6752 if (rtx_cost (y, SET) < rtx_cost (x, SET))
6753 x = y;
6754 }
6755
d0ab8cd3 6756 break;
dfbe1b2f
RK
6757 }
6758
6139ff20 6759 goto binop;
dfbe1b2f
RK
6760
6761 case PLUS:
6139ff20
RK
6762 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6763 low-order bits (as in an alignment operation) and FOO is already
6764 aligned to that boundary, mask C1 to that boundary as well.
6765 This may eliminate that PLUS and, later, the AND. */
9fa6d012
TG
6766
6767 {
770ae6cc 6768 unsigned int width = GET_MODE_BITSIZE (mode);
9fa6d012
TG
6769 unsigned HOST_WIDE_INT smask = mask;
6770
6771 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6772 number, sign extend it. */
6773
6774 if (width < HOST_BITS_PER_WIDE_INT
6775 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6776 smask |= (HOST_WIDE_INT) -1 << width;
6777
6778 if (GET_CODE (XEXP (x, 1)) == CONST_INT
0e9ff885
DM
6779 && exact_log2 (- smask) >= 0)
6780 {
6781#ifdef STACK_BIAS
6782 if (STACK_BIAS
6783 && (XEXP (x, 0) == stack_pointer_rtx
6784 || XEXP (x, 0) == frame_pointer_rtx))
6785 {
663522cb
KH
6786 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6787 unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);
6788
6789 sp_mask &= ~(sp_alignment - 1);
6790 if ((sp_mask & ~smask) == 0
6791 && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~smask) != 0)
0e9ff885 6792 return force_to_mode (plus_constant (XEXP (x, 0),
663522cb 6793 ((INTVAL (XEXP (x, 1)) -
835c8e04 6794 STACK_BIAS) & smask)
0e9ff885 6795 + STACK_BIAS),
663522cb
KH
6796 mode, smask, reg, next_select);
6797 }
0e9ff885 6798#endif
663522cb
KH
6799 if ((nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
6800 && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
0e9ff885 6801 return force_to_mode (plus_constant (XEXP (x, 0),
663522cb 6802 (INTVAL (XEXP (x, 1))
835c8e04
DT
6803 & smask)),
6804 mode, smask, reg, next_select);
0e9ff885 6805 }
9fa6d012 6806 }
6139ff20 6807
0f41302f 6808 /* ... fall through ... */
6139ff20 6809
dfbe1b2f 6810 case MULT:
6139ff20
RK
6811 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6812 most significant bit in MASK since carries from those bits will
6813 affect the bits we are interested in. */
6814 mask = fuller_mask;
6815 goto binop;
6816
d41638e4
RH
6817 case MINUS:
6818 /* If X is (minus C Y) where C's least set bit is larger than any bit
6819 in the mask, then we may replace with (neg Y). */
6820 if (GET_CODE (XEXP (x, 0)) == CONST_INT
0345195a
RK
6821 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
6822 & -INTVAL (XEXP (x, 0))))
6823 > mask))
d41638e4 6824 {
f1c6ba8b
RK
6825 x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
6826 GET_MODE (x));
d41638e4
RH
6827 return force_to_mode (x, mode, mask, reg, next_select);
6828 }
6829
6830 /* Similarly, if C contains every bit in the mask, then we may
6831 replace with (not Y). */
6832 if (GET_CODE (XEXP (x, 0)) == CONST_INT
0345195a
RK
6833 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) mask)
6834 == INTVAL (XEXP (x, 0))))
d41638e4 6835 {
f1c6ba8b
RK
6836 x = simplify_gen_unary (NOT, GET_MODE (x),
6837 XEXP (x, 1), GET_MODE (x));
d41638e4
RH
6838 return force_to_mode (x, mode, mask, reg, next_select);
6839 }
6840
6841 mask = fuller_mask;
6842 goto binop;
6843
dfbe1b2f
RK
6844 case IOR:
6845 case XOR:
6139ff20
RK
6846 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6847 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6848 operation which may be a bitfield extraction. Ensure that the
6849 constant we form is not wider than the mode of X. */
6850
6851 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6852 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6853 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6854 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6855 && GET_CODE (XEXP (x, 1)) == CONST_INT
6856 && ((INTVAL (XEXP (XEXP (x, 0), 1))
6857 + floor_log2 (INTVAL (XEXP (x, 1))))
6858 < GET_MODE_BITSIZE (GET_MODE (x)))
6859 && (INTVAL (XEXP (x, 1))
663522cb 6860 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6139ff20
RK
6861 {
6862 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
663522cb 6863 << INTVAL (XEXP (XEXP (x, 0), 1)));
6139ff20
RK
6864 temp = gen_binary (GET_CODE (x), GET_MODE (x),
6865 XEXP (XEXP (x, 0), 0), temp);
d4d2b13f
RK
6866 x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6867 XEXP (XEXP (x, 0), 1));
e3d616e3 6868 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6869 }
6870
6871 binop:
dfbe1b2f 6872 /* For most binary operations, just propagate into the operation and
6139ff20
RK
6873 change the mode if we have an operation of that mode. */
6874
e3d616e3
RK
6875 op0 = gen_lowpart_for_combine (op_mode,
6876 force_to_mode (XEXP (x, 0), mode, mask,
6877 reg, next_select));
6878 op1 = gen_lowpart_for_combine (op_mode,
6879 force_to_mode (XEXP (x, 1), mode, mask,
6880 reg, next_select));
6139ff20 6881
2dd484ed
RK
6882 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
6883 MASK since OP1 might have been sign-extended but we never want
6884 to turn on extra bits, since combine might have previously relied
6885 on them being off. */
6886 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
6887 && (INTVAL (op1) & mask) != 0)
6888 op1 = GEN_INT (INTVAL (op1) & mask);
663522cb 6889
6139ff20
RK
6890 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6891 x = gen_binary (code, op_mode, op0, op1);
d0ab8cd3 6892 break;
dfbe1b2f
RK
6893
6894 case ASHIFT:
dfbe1b2f 6895 /* For left shifts, do the same, but just for the first operand.
f6785026
RK
6896 However, we cannot do anything with shifts where we cannot
6897 guarantee that the counts are smaller than the size of the mode
6898 because such a count will have a different meaning in a
6139ff20 6899 wider mode. */
f6785026
RK
6900
6901 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 6902 && INTVAL (XEXP (x, 1)) >= 0
f6785026
RK
6903 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6904 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6905 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
adb7a1cb 6906 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
f6785026 6907 break;
663522cb 6908
6139ff20
RK
6909 /* If the shift count is a constant and we can do arithmetic in
6910 the mode of the shift, refine which bits we need. Otherwise, use the
6911 conservative form of the mask. */
6912 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6913 && INTVAL (XEXP (x, 1)) >= 0
6914 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6915 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6916 mask >>= INTVAL (XEXP (x, 1));
6917 else
6918 mask = fuller_mask;
6919
6920 op0 = gen_lowpart_for_combine (op_mode,
6921 force_to_mode (XEXP (x, 0), op_mode,
e3d616e3 6922 mask, reg, next_select));
6139ff20
RK
6923
6924 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
663522cb 6925 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
d0ab8cd3 6926 break;
dfbe1b2f
RK
6927
6928 case LSHIFTRT:
1347292b
JW
6929 /* Here we can only do something if the shift count is a constant,
6930 this shift constant is valid for the host, and we can do arithmetic
6931 in OP_MODE. */
dfbe1b2f
RK
6932
6933 if (GET_CODE (XEXP (x, 1)) == CONST_INT
1347292b 6934 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6139ff20 6935 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 6936 {
6139ff20 6937 rtx inner = XEXP (x, 0);
402b6c2a 6938 unsigned HOST_WIDE_INT inner_mask;
6139ff20
RK
6939
6940 /* Select the mask of the bits we need for the shift operand. */
402b6c2a 6941 inner_mask = mask << INTVAL (XEXP (x, 1));
d0ab8cd3 6942
6139ff20 6943 /* We can only change the mode of the shift if we can do arithmetic
402b6c2a
JW
6944 in the mode of the shift and INNER_MASK is no wider than the
6945 width of OP_MODE. */
6139ff20 6946 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
663522cb 6947 || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
d0ab8cd3
RK
6948 op_mode = GET_MODE (x);
6949
402b6c2a 6950 inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
6139ff20
RK
6951
6952 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
6953 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
d0ab8cd3 6954 }
6139ff20
RK
6955
6956 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6957 shift and AND produces only copies of the sign bit (C2 is one less
6958 than a power of two), we can do this with just a shift. */
6959
6960 if (GET_CODE (x) == LSHIFTRT
6961 && GET_CODE (XEXP (x, 1)) == CONST_INT
cfff35c1
JW
6962 /* The shift puts one of the sign bit copies in the least significant
6963 bit. */
6139ff20
RK
6964 && ((INTVAL (XEXP (x, 1))
6965 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
6966 >= GET_MODE_BITSIZE (GET_MODE (x)))
6967 && exact_log2 (mask + 1) >= 0
cfff35c1
JW
6968 /* Number of bits left after the shift must be more than the mask
6969 needs. */
6970 && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
6971 <= GET_MODE_BITSIZE (GET_MODE (x)))
6972 /* Must be more sign bit copies than the mask needs. */
770ae6cc 6973 && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6139ff20
RK
6974 >= exact_log2 (mask + 1)))
6975 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6976 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
6977 - exact_log2 (mask + 1)));
fae2db47
JW
6978
6979 goto shiftrt;
d0ab8cd3
RK
6980
6981 case ASHIFTRT:
6139ff20
RK
6982 /* If we are just looking for the sign bit, we don't need this shift at
6983 all, even if it has a variable count. */
9bf22b75 6984 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
e51712db 6985 && (mask == ((unsigned HOST_WIDE_INT) 1
9bf22b75 6986 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
e3d616e3 6987 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20
RK
6988
6989 /* If this is a shift by a constant, get a mask that contains those bits
6990 that are not copies of the sign bit. We then have two cases: If
6991 MASK only includes those bits, this can be a logical shift, which may
6992 allow simplifications. If MASK is a single-bit field not within
6993 those bits, we are requesting a copy of the sign bit and hence can
6994 shift the sign bit to the appropriate location. */
6995
6996 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
6997 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6998 {
6999 int i = -1;
7000
b69960ac
RK
7001 /* If the considered data is wider then HOST_WIDE_INT, we can't
7002 represent a mask for all its bits in a single scalar.
7003 But we only care about the lower bits, so calculate these. */
7004
6a11342f 7005 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
b69960ac 7006 {
663522cb 7007 nonzero = ~(HOST_WIDE_INT) 0;
b69960ac
RK
7008
7009 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7010 is the number of bits a full-width mask would have set.
7011 We need only shift if these are fewer than nonzero can
7012 hold. If not, we must keep all bits set in nonzero. */
7013
7014 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7015 < HOST_BITS_PER_WIDE_INT)
7016 nonzero >>= INTVAL (XEXP (x, 1))
7017 + HOST_BITS_PER_WIDE_INT
7018 - GET_MODE_BITSIZE (GET_MODE (x)) ;
7019 }
7020 else
7021 {
7022 nonzero = GET_MODE_MASK (GET_MODE (x));
7023 nonzero >>= INTVAL (XEXP (x, 1));
7024 }
6139ff20 7025
663522cb 7026 if ((mask & ~nonzero) == 0
6139ff20
RK
7027 || (i = exact_log2 (mask)) >= 0)
7028 {
7029 x = simplify_shift_const
7030 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7031 i < 0 ? INTVAL (XEXP (x, 1))
7032 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7033
7034 if (GET_CODE (x) != ASHIFTRT)
e3d616e3 7035 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
7036 }
7037 }
7038
7039 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
7040 even if the shift count isn't a constant. */
7041 if (mask == 1)
7042 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
7043
fae2db47
JW
7044 shiftrt:
7045
7046 /* If this is a zero- or sign-extension operation that just affects bits
4c002f29
RK
7047 we don't care about, remove it. Be sure the call above returned
7048 something that is still a shift. */
d0ab8cd3 7049
4c002f29
RK
7050 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7051 && GET_CODE (XEXP (x, 1)) == CONST_INT
d0ab8cd3 7052 && INTVAL (XEXP (x, 1)) >= 0
6139ff20
RK
7053 && (INTVAL (XEXP (x, 1))
7054 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
d0ab8cd3
RK
7055 && GET_CODE (XEXP (x, 0)) == ASHIFT
7056 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7057 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
e3d616e3
RK
7058 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7059 reg, next_select);
6139ff20 7060
dfbe1b2f
RK
7061 break;
7062
6139ff20
RK
7063 case ROTATE:
7064 case ROTATERT:
7065 /* If the shift count is constant and we can do computations
7066 in the mode of X, compute where the bits we care about are.
7067 Otherwise, we can't do anything. Don't change the mode of
7068 the shift or propagate MODE into the shift, though. */
7069 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7070 && INTVAL (XEXP (x, 1)) >= 0)
7071 {
7072 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7073 GET_MODE (x), GEN_INT (mask),
7074 XEXP (x, 1));
7d171a1e 7075 if (temp && GET_CODE(temp) == CONST_INT)
6139ff20
RK
7076 SUBST (XEXP (x, 0),
7077 force_to_mode (XEXP (x, 0), GET_MODE (x),
e3d616e3 7078 INTVAL (temp), reg, next_select));
6139ff20
RK
7079 }
7080 break;
663522cb 7081
dfbe1b2f 7082 case NEG:
180b8e4b
RK
7083 /* If we just want the low-order bit, the NEG isn't needed since it
7084 won't change the low-order bit. */
7085 if (mask == 1)
7086 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7087
6139ff20
RK
7088 /* We need any bits less significant than the most significant bit in
7089 MASK since carries from those bits will affect the bits we are
7090 interested in. */
7091 mask = fuller_mask;
7092 goto unop;
7093
dfbe1b2f 7094 case NOT:
6139ff20
RK
7095 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7096 same as the XOR case above. Ensure that the constant we form is not
7097 wider than the mode of X. */
7098
7099 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7100 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7101 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7102 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7103 < GET_MODE_BITSIZE (GET_MODE (x)))
7104 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7105 {
7106 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
7107 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
7108 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
7109
e3d616e3 7110 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
7111 }
7112
f82da7d2
JW
7113 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7114 use the full mask inside the NOT. */
7115 mask = fuller_mask;
7116
6139ff20 7117 unop:
e3d616e3
RK
7118 op0 = gen_lowpart_for_combine (op_mode,
7119 force_to_mode (XEXP (x, 0), mode, mask,
7120 reg, next_select));
6139ff20 7121 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
f1c6ba8b 7122 x = simplify_gen_unary (code, op_mode, op0, op_mode);
6139ff20
RK
7123 break;
7124
7125 case NE:
7126 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
3aceff0d 7127 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
1a6ec070 7128 which is equal to STORE_FLAG_VALUE. */
663522cb 7129 if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
3aceff0d 7130 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
1a6ec070 7131 && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
e3d616e3 7132 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20 7133
d0ab8cd3
RK
7134 break;
7135
7136 case IF_THEN_ELSE:
7137 /* We have no way of knowing if the IF_THEN_ELSE can itself be
7138 written in a narrower mode. We play it safe and do not do so. */
7139
7140 SUBST (XEXP (x, 1),
7141 gen_lowpart_for_combine (GET_MODE (x),
7142 force_to_mode (XEXP (x, 1), mode,
e3d616e3 7143 mask, reg, next_select)));
d0ab8cd3
RK
7144 SUBST (XEXP (x, 2),
7145 gen_lowpart_for_combine (GET_MODE (x),
7146 force_to_mode (XEXP (x, 2), mode,
e3d616e3 7147 mask, reg,next_select)));
d0ab8cd3 7148 break;
663522cb 7149
e9a25f70
JL
7150 default:
7151 break;
dfbe1b2f
RK
7152 }
7153
d0ab8cd3 7154 /* Ensure we return a value of the proper mode. */
dfbe1b2f
RK
7155 return gen_lowpart_for_combine (mode, x);
7156}
7157\f
abe6e52f
RK
7158/* Return nonzero if X is an expression that has one of two values depending on
7159 whether some other value is zero or nonzero. In that case, we return the
7160 value that is being tested, *PTRUE is set to the value if the rtx being
7161 returned has a nonzero value, and *PFALSE is set to the other alternative.
7162
7163 If we return zero, we set *PTRUE and *PFALSE to X. */
7164
7165static rtx
7166if_then_else_cond (x, ptrue, pfalse)
7167 rtx x;
7168 rtx *ptrue, *pfalse;
7169{
7170 enum machine_mode mode = GET_MODE (x);
7171 enum rtx_code code = GET_CODE (x);
abe6e52f
RK
7172 rtx cond0, cond1, true0, true1, false0, false1;
7173 unsigned HOST_WIDE_INT nz;
7174
14a774a9
RK
7175 /* If we are comparing a value against zero, we are done. */
7176 if ((code == NE || code == EQ)
7177 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
7178 {
e8758a3a
JL
7179 *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7180 *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
14a774a9
RK
7181 return XEXP (x, 0);
7182 }
7183
abe6e52f
RK
7184 /* If this is a unary operation whose operand has one of two values, apply
7185 our opcode to compute those values. */
14a774a9
RK
7186 else if (GET_RTX_CLASS (code) == '1'
7187 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
abe6e52f 7188 {
f1c6ba8b
RK
7189 *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7190 *pfalse = simplify_gen_unary (code, mode, false0,
7191 GET_MODE (XEXP (x, 0)));
abe6e52f
RK
7192 return cond0;
7193 }
7194
3a19aabc 7195 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
ddd5a7c1 7196 make can't possibly match and would suppress other optimizations. */
3a19aabc
RK
7197 else if (code == COMPARE)
7198 ;
7199
abe6e52f
RK
7200 /* If this is a binary operation, see if either side has only one of two
7201 values. If either one does or if both do and they are conditional on
7202 the same value, compute the new true and false values. */
7203 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7204 || GET_RTX_CLASS (code) == '<')
7205 {
7206 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7207 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7208
7209 if ((cond0 != 0 || cond1 != 0)
7210 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7211 {
987e845a
JW
7212 /* If if_then_else_cond returned zero, then true/false are the
7213 same rtl. We must copy one of them to prevent invalid rtl
7214 sharing. */
7215 if (cond0 == 0)
7216 true0 = copy_rtx (true0);
7217 else if (cond1 == 0)
7218 true1 = copy_rtx (true1);
7219
abe6e52f
RK
7220 *ptrue = gen_binary (code, mode, true0, true1);
7221 *pfalse = gen_binary (code, mode, false0, false1);
7222 return cond0 ? cond0 : cond1;
7223 }
9210df58 7224
9210df58 7225 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
0802d516
RK
7226 operands is zero when the other is non-zero, and vice-versa,
7227 and STORE_FLAG_VALUE is 1 or -1. */
9210df58 7228
0802d516
RK
7229 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7230 && (code == PLUS || code == IOR || code == XOR || code == MINUS
663522cb 7231 || code == UMAX)
9210df58
RK
7232 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7233 {
7234 rtx op0 = XEXP (XEXP (x, 0), 1);
7235 rtx op1 = XEXP (XEXP (x, 1), 1);
7236
7237 cond0 = XEXP (XEXP (x, 0), 0);
7238 cond1 = XEXP (XEXP (x, 1), 0);
7239
7240 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7241 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
9a915772 7242 && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
9210df58
RK
7243 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7244 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7245 || ((swap_condition (GET_CODE (cond0))
9a915772 7246 == combine_reversed_comparison_code (cond1))
9210df58
RK
7247 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7248 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7249 && ! side_effects_p (x))
7250 {
7251 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
663522cb
KH
7252 *pfalse = gen_binary (MULT, mode,
7253 (code == MINUS
f1c6ba8b
RK
7254 ? simplify_gen_unary (NEG, mode, op1,
7255 mode)
7256 : op1),
9210df58
RK
7257 const_true_rtx);
7258 return cond0;
7259 }
7260 }
7261
7262 /* Similarly for MULT, AND and UMIN, execpt that for these the result
7263 is always zero. */
0802d516
RK
7264 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7265 && (code == MULT || code == AND || code == UMIN)
9210df58
RK
7266 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7267 {
7268 cond0 = XEXP (XEXP (x, 0), 0);
7269 cond1 = XEXP (XEXP (x, 1), 0);
7270
7271 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7272 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
9a915772 7273 && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
9210df58
RK
7274 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7275 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7276 || ((swap_condition (GET_CODE (cond0))
9a915772 7277 == combine_reversed_comparison_code (cond1))
9210df58
RK
7278 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7279 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7280 && ! side_effects_p (x))
7281 {
7282 *ptrue = *pfalse = const0_rtx;
7283 return cond0;
7284 }
7285 }
abe6e52f
RK
7286 }
7287
7288 else if (code == IF_THEN_ELSE)
7289 {
7290 /* If we have IF_THEN_ELSE already, extract the condition and
7291 canonicalize it if it is NE or EQ. */
7292 cond0 = XEXP (x, 0);
7293 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7294 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7295 return XEXP (cond0, 0);
7296 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7297 {
7298 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7299 return XEXP (cond0, 0);
7300 }
7301 else
7302 return cond0;
7303 }
7304
0631e0bf
JH
7305 /* If X is a SUBREG, we can narrow both the true and false values
7306 if the inner expression, if there is a condition. */
7307 else if (code == SUBREG
abe6e52f
RK
7308 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7309 &true0, &false0)))
7310 {
0631e0bf
JH
7311 *ptrue = simplify_gen_subreg (mode, true0,
7312 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7313 *pfalse = simplify_gen_subreg (mode, false0,
7314 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
abe6e52f 7315
abe6e52f
RK
7316 return cond0;
7317 }
7318
7319 /* If X is a constant, this isn't special and will cause confusions
7320 if we treat it as such. Likewise if it is equivalent to a constant. */
7321 else if (CONSTANT_P (x)
7322 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7323 ;
7324
1f3f36d1
RH
7325 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7326 will be least confusing to the rest of the compiler. */
7327 else if (mode == BImode)
7328 {
7329 *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7330 return x;
7331 }
7332
663522cb 7333 /* If X is known to be either 0 or -1, those are the true and
abe6e52f 7334 false values when testing X. */
49219895
JH
7335 else if (x == constm1_rtx || x == const0_rtx
7336 || (mode != VOIDmode
7337 && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
abe6e52f
RK
7338 {
7339 *ptrue = constm1_rtx, *pfalse = const0_rtx;
7340 return x;
7341 }
7342
7343 /* Likewise for 0 or a single bit. */
49219895
JH
7344 else if (mode != VOIDmode
7345 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7346 && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
abe6e52f
RK
7347 {
7348 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
7349 return x;
7350 }
7351
7352 /* Otherwise fail; show no condition with true and false values the same. */
7353 *ptrue = *pfalse = x;
7354 return 0;
7355}
7356\f
1a26b032
RK
7357/* Return the value of expression X given the fact that condition COND
7358 is known to be true when applied to REG as its first operand and VAL
7359 as its second. X is known to not be shared and so can be modified in
7360 place.
7361
7362 We only handle the simplest cases, and specifically those cases that
7363 arise with IF_THEN_ELSE expressions. */
7364
7365static rtx
7366known_cond (x, cond, reg, val)
7367 rtx x;
7368 enum rtx_code cond;
7369 rtx reg, val;
7370{
7371 enum rtx_code code = GET_CODE (x);
f24ad0e4 7372 rtx temp;
6f7d635c 7373 const char *fmt;
1a26b032
RK
7374 int i, j;
7375
7376 if (side_effects_p (x))
7377 return x;
7378
69bc0a1f
JH
7379 if (cond == EQ && rtx_equal_p (x, reg) && !FLOAT_MODE_P (cond))
7380 return val;
7381 if (cond == UNEQ && rtx_equal_p (x, reg))
1a26b032
RK
7382 return val;
7383
7384 /* If X is (abs REG) and we know something about REG's relationship
7385 with zero, we may be able to simplify this. */
7386
7387 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7388 switch (cond)
7389 {
7390 case GE: case GT: case EQ:
7391 return XEXP (x, 0);
7392 case LT: case LE:
f1c6ba8b
RK
7393 return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7394 XEXP (x, 0),
7395 GET_MODE (XEXP (x, 0)));
e9a25f70
JL
7396 default:
7397 break;
1a26b032
RK
7398 }
7399
7400 /* The only other cases we handle are MIN, MAX, and comparisons if the
7401 operands are the same as REG and VAL. */
7402
7403 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7404 {
7405 if (rtx_equal_p (XEXP (x, 0), val))
7406 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7407
7408 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7409 {
7410 if (GET_RTX_CLASS (code) == '<')
1eb8759b
RH
7411 {
7412 if (comparison_dominates_p (cond, code))
7413 return const_true_rtx;
1a26b032 7414
9a915772 7415 code = combine_reversed_comparison_code (x);
1eb8759b
RH
7416 if (code != UNKNOWN
7417 && comparison_dominates_p (cond, code))
7418 return const0_rtx;
7419 else
7420 return x;
7421 }
1a26b032
RK
7422 else if (code == SMAX || code == SMIN
7423 || code == UMIN || code == UMAX)
7424 {
7425 int unsignedp = (code == UMIN || code == UMAX);
7426
ac4cdf40
JE
7427 /* Do not reverse the condition when it is NE or EQ.
7428 This is because we cannot conclude anything about
7429 the value of 'SMAX (x, y)' when x is not equal to y,
23190837 7430 but we can when x equals y. */
ac4cdf40
JE
7431 if ((code == SMAX || code == UMAX)
7432 && ! (cond == EQ || cond == NE))
1a26b032
RK
7433 cond = reverse_condition (cond);
7434
7435 switch (cond)
7436 {
7437 case GE: case GT:
7438 return unsignedp ? x : XEXP (x, 1);
7439 case LE: case LT:
7440 return unsignedp ? x : XEXP (x, 0);
7441 case GEU: case GTU:
7442 return unsignedp ? XEXP (x, 1) : x;
7443 case LEU: case LTU:
7444 return unsignedp ? XEXP (x, 0) : x;
e9a25f70
JL
7445 default:
7446 break;
1a26b032
RK
7447 }
7448 }
7449 }
7450 }
7451
7452 fmt = GET_RTX_FORMAT (code);
7453 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7454 {
7455 if (fmt[i] == 'e')
7456 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7457 else if (fmt[i] == 'E')
7458 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7459 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7460 cond, reg, val));
7461 }
7462
7463 return x;
7464}
7465\f
e11fa86f
RK
7466/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7467 assignment as a field assignment. */
7468
7469static int
7470rtx_equal_for_field_assignment_p (x, y)
7471 rtx x;
7472 rtx y;
7473{
e11fa86f
RK
7474 if (x == y || rtx_equal_p (x, y))
7475 return 1;
7476
7477 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7478 return 0;
7479
7480 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7481 Note that all SUBREGs of MEM are paradoxical; otherwise they
7482 would have been rewritten. */
7483 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7484 && GET_CODE (SUBREG_REG (y)) == MEM
7485 && rtx_equal_p (SUBREG_REG (y),
7486 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7487 return 1;
7488
7489 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7490 && GET_CODE (SUBREG_REG (x)) == MEM
7491 && rtx_equal_p (SUBREG_REG (x),
7492 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7493 return 1;
7494
9ec36da5
JL
7495 /* We used to see if get_last_value of X and Y were the same but that's
7496 not correct. In one direction, we'll cause the assignment to have
7497 the wrong destination and in the case, we'll import a register into this
7498 insn that might have already have been dead. So fail if none of the
7499 above cases are true. */
7500 return 0;
e11fa86f
RK
7501}
7502\f
230d793d
RS
7503/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7504 Return that assignment if so.
7505
7506 We only handle the most common cases. */
7507
7508static rtx
7509make_field_assignment (x)
7510 rtx x;
7511{
7512 rtx dest = SET_DEST (x);
7513 rtx src = SET_SRC (x);
dfbe1b2f 7514 rtx assign;
e11fa86f 7515 rtx rhs, lhs;
5f4f0e22 7516 HOST_WIDE_INT c1;
770ae6cc
RK
7517 HOST_WIDE_INT pos;
7518 unsigned HOST_WIDE_INT len;
dfbe1b2f
RK
7519 rtx other;
7520 enum machine_mode mode;
230d793d
RS
7521
7522 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7523 a clear of a one-bit field. We will have changed it to
7524 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7525 for a SUBREG. */
7526
7527 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7528 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7529 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
e11fa86f 7530 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7531 {
8999a12e 7532 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 7533 1, 1, 1, 0);
76184def 7534 if (assign != 0)
38a448ca 7535 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 7536 return x;
230d793d
RS
7537 }
7538
7539 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7540 && subreg_lowpart_p (XEXP (src, 0))
663522cb 7541 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
230d793d
RS
7542 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7543 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7544 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
e11fa86f 7545 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7546 {
8999a12e 7547 assign = make_extraction (VOIDmode, dest, 0,
230d793d
RS
7548 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7549 1, 1, 1, 0);
76184def 7550 if (assign != 0)
38a448ca 7551 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 7552 return x;
230d793d
RS
7553 }
7554
9dd11dcb 7555 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
230d793d
RS
7556 one-bit field. */
7557 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7558 && XEXP (XEXP (src, 0), 0) == const1_rtx
e11fa86f 7559 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7560 {
8999a12e 7561 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 7562 1, 1, 1, 0);
76184def 7563 if (assign != 0)
38a448ca 7564 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
76184def 7565 return x;
230d793d
RS
7566 }
7567
dfbe1b2f 7568 /* The other case we handle is assignments into a constant-position
9dd11dcb 7569 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
dfbe1b2f
RK
7570 a mask that has all one bits except for a group of zero bits and
7571 OTHER is known to have zeros where C1 has ones, this is such an
7572 assignment. Compute the position and length from C1. Shift OTHER
7573 to the appropriate position, force it to the required mode, and
7574 make the extraction. Check for the AND in both operands. */
7575
9dd11dcb 7576 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
e11fa86f
RK
7577 return x;
7578
7579 rhs = expand_compound_operation (XEXP (src, 0));
7580 lhs = expand_compound_operation (XEXP (src, 1));
7581
7582 if (GET_CODE (rhs) == AND
7583 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7584 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7585 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7586 else if (GET_CODE (lhs) == AND
7587 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7588 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7589 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
dfbe1b2f
RK
7590 else
7591 return x;
230d793d 7592
663522cb 7593 pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
dfbe1b2f 7594 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
e5e809f4
JL
7595 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7596 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
dfbe1b2f 7597 return x;
230d793d 7598
5f4f0e22 7599 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
76184def
DE
7600 if (assign == 0)
7601 return x;
230d793d 7602
dfbe1b2f
RK
7603 /* The mode to use for the source is the mode of the assignment, or of
7604 what is inside a possible STRICT_LOW_PART. */
663522cb 7605 mode = (GET_CODE (assign) == STRICT_LOW_PART
dfbe1b2f 7606 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 7607
dfbe1b2f
RK
7608 /* Shift OTHER right POS places and make it the source, restricting it
7609 to the proper length and mode. */
230d793d 7610
5f4f0e22
CH
7611 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7612 GET_MODE (src), other, pos),
6139ff20
RK
7613 mode,
7614 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
0345195a 7615 ? ~(unsigned HOST_WIDE_INT) 0
729a2125 7616 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 7617 dest, 0);
230d793d 7618
f1c6ba8b 7619 return gen_rtx_SET (VOIDmode, assign, src);
230d793d
RS
7620}
7621\f
7622/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7623 if so. */
7624
7625static rtx
7626apply_distributive_law (x)
7627 rtx x;
7628{
7629 enum rtx_code code = GET_CODE (x);
7630 rtx lhs, rhs, other;
7631 rtx tem;
7632 enum rtx_code inner_code;
7633
d8a8a4da
RS
7634 /* Distributivity is not true for floating point.
7635 It can change the value. So don't do it.
7636 -- rms and moshier@world.std.com. */
3ad2180a 7637 if (FLOAT_MODE_P (GET_MODE (x)))
d8a8a4da
RS
7638 return x;
7639
230d793d
RS
7640 /* The outer operation can only be one of the following: */
7641 if (code != IOR && code != AND && code != XOR
7642 && code != PLUS && code != MINUS)
7643 return x;
7644
7645 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7646
0f41302f
MS
7647 /* If either operand is a primitive we can't do anything, so get out
7648 fast. */
230d793d 7649 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
dfbe1b2f 7650 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
230d793d
RS
7651 return x;
7652
7653 lhs = expand_compound_operation (lhs);
7654 rhs = expand_compound_operation (rhs);
7655 inner_code = GET_CODE (lhs);
7656 if (inner_code != GET_CODE (rhs))
7657 return x;
7658
7659 /* See if the inner and outer operations distribute. */
7660 switch (inner_code)
7661 {
7662 case LSHIFTRT:
7663 case ASHIFTRT:
7664 case AND:
7665 case IOR:
7666 /* These all distribute except over PLUS. */
7667 if (code == PLUS || code == MINUS)
7668 return x;
7669 break;
7670
7671 case MULT:
7672 if (code != PLUS && code != MINUS)
7673 return x;
7674 break;
7675
7676 case ASHIFT:
45620ed4 7677 /* This is also a multiply, so it distributes over everything. */
230d793d
RS
7678 break;
7679
7680 case SUBREG:
dfbe1b2f 7681 /* Non-paradoxical SUBREGs distributes over all operations, provided
ddef6bc7 7682 the inner modes and byte offsets are the same, this is an extraction
2b4bd1bc
JW
7683 of a low-order part, we don't convert an fp operation to int or
7684 vice versa, and we would not be converting a single-word
dfbe1b2f 7685 operation into a multi-word operation. The latter test is not
2b4bd1bc 7686 required, but it prevents generating unneeded multi-word operations.
dfbe1b2f
RK
7687 Some of the previous tests are redundant given the latter test, but
7688 are retained because they are required for correctness.
7689
7690 We produce the result slightly differently in this case. */
7691
7692 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
ddef6bc7 7693 || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
dfbe1b2f 7694 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
7695 || (GET_MODE_CLASS (GET_MODE (lhs))
7696 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7697 || (GET_MODE_SIZE (GET_MODE (lhs))
8af24e26 7698 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7699 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
230d793d
RS
7700 return x;
7701
7702 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7703 SUBREG_REG (lhs), SUBREG_REG (rhs));
7704 return gen_lowpart_for_combine (GET_MODE (x), tem);
7705
7706 default:
7707 return x;
7708 }
7709
7710 /* Set LHS and RHS to the inner operands (A and B in the example
7711 above) and set OTHER to the common operand (C in the example).
7712 These is only one way to do this unless the inner operation is
7713 commutative. */
7714 if (GET_RTX_CLASS (inner_code) == 'c'
7715 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7716 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7717 else if (GET_RTX_CLASS (inner_code) == 'c'
7718 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7719 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7720 else if (GET_RTX_CLASS (inner_code) == 'c'
7721 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7722 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7723 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7724 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7725 else
7726 return x;
7727
7728 /* Form the new inner operation, seeing if it simplifies first. */
7729 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7730
7731 /* There is one exception to the general way of distributing:
7732 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
7733 if (code == XOR && inner_code == IOR)
7734 {
7735 inner_code = AND;
f1c6ba8b 7736 other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
230d793d
RS
7737 }
7738
7739 /* We may be able to continuing distributing the result, so call
7740 ourselves recursively on the inner operation before forming the
7741 outer operation, which we return. */
7742 return gen_binary (inner_code, GET_MODE (x),
7743 apply_distributive_law (tem), other);
7744}
7745\f
7746/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7747 in MODE.
7748
7749 Return an equivalent form, if different from X. Otherwise, return X. If
7750 X is zero, we are to always construct the equivalent form. */
7751
7752static rtx
7753simplify_and_const_int (x, mode, varop, constop)
7754 rtx x;
7755 enum machine_mode mode;
7756 rtx varop;
5f4f0e22 7757 unsigned HOST_WIDE_INT constop;
230d793d 7758{
951553af 7759 unsigned HOST_WIDE_INT nonzero;
42301240 7760 int i;
230d793d 7761
6139ff20
RK
7762 /* Simplify VAROP knowing that we will be only looking at some of the
7763 bits in it. */
e3d616e3 7764 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
230d793d 7765
6139ff20
RK
7766 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7767 CONST_INT, we are done. */
7768 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7769 return varop;
230d793d 7770
fc06d7aa
RK
7771 /* See what bits may be nonzero in VAROP. Unlike the general case of
7772 a call to nonzero_bits, here we don't care about bits outside
7773 MODE. */
7774
7775 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7e4ce834 7776 nonzero = trunc_int_for_mode (nonzero, mode);
9fa6d012 7777
230d793d 7778 /* Turn off all bits in the constant that are known to already be zero.
951553af 7779 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
230d793d
RS
7780 which is tested below. */
7781
951553af 7782 constop &= nonzero;
230d793d
RS
7783
7784 /* If we don't have any bits left, return zero. */
7785 if (constop == 0)
7786 return const0_rtx;
7787
42301240
RK
7788 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7789 a power of two, we can replace this with a ASHIFT. */
7790 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7791 && (i = exact_log2 (constop)) >= 0)
7792 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
663522cb 7793
6139ff20
RK
7794 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7795 or XOR, then try to apply the distributive law. This may eliminate
7796 operations if either branch can be simplified because of the AND.
7797 It may also make some cases more complex, but those cases probably
7798 won't match a pattern either with or without this. */
7799
7800 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7801 return
7802 gen_lowpart_for_combine
7803 (mode,
7804 apply_distributive_law
7805 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7806 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7807 XEXP (varop, 0), constop),
7808 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7809 XEXP (varop, 1), constop))));
7810
230d793d
RS
7811 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7812 if we already had one (just check for the simplest cases). */
7813 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7814 && GET_MODE (XEXP (x, 0)) == mode
7815 && SUBREG_REG (XEXP (x, 0)) == varop)
7816 varop = XEXP (x, 0);
7817 else
7818 varop = gen_lowpart_for_combine (mode, varop);
7819
0f41302f 7820 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
7821 if (GET_CODE (varop) == CLOBBER)
7822 return x ? x : varop;
7823
7824 /* If we are only masking insignificant bits, return VAROP. */
951553af 7825 if (constop == nonzero)
230d793d
RS
7826 x = varop;
7827
7828 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7829 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6139ff20 7830 x = gen_binary (AND, mode, varop, GEN_INT (constop));
230d793d
RS
7831
7832 else
7833 {
7834 if (GET_CODE (XEXP (x, 1)) != CONST_INT
e51712db 7835 || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
5f4f0e22 7836 SUBST (XEXP (x, 1), GEN_INT (constop));
230d793d
RS
7837
7838 SUBST (XEXP (x, 0), varop);
7839 }
7840
7841 return x;
7842}
7843\f
b3728b0e
JW
7844/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7845 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7846 is less useful. We can't allow both, because that results in exponential
956d6950 7847 run time recursion. There is a nullstone testcase that triggered
b3728b0e
JW
7848 this. This macro avoids accidental uses of num_sign_bit_copies. */
7849#define num_sign_bit_copies()
7850
230d793d
RS
7851/* Given an expression, X, compute which bits in X can be non-zero.
7852 We don't care about bits outside of those defined in MODE.
7853
7854 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
7855 a shift, AND, or zero_extract, we can do better. */
7856
5f4f0e22 7857static unsigned HOST_WIDE_INT
951553af 7858nonzero_bits (x, mode)
230d793d
RS
7859 rtx x;
7860 enum machine_mode mode;
7861{
951553af
RK
7862 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7863 unsigned HOST_WIDE_INT inner_nz;
230d793d 7864 enum rtx_code code;
770ae6cc 7865 unsigned int mode_width = GET_MODE_BITSIZE (mode);
230d793d
RS
7866 rtx tem;
7867
1c75dfa4
RK
7868 /* For floating-point values, assume all bits are needed. */
7869 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7870 return nonzero;
7871
230d793d
RS
7872 /* If X is wider than MODE, use its mode instead. */
7873 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7874 {
7875 mode = GET_MODE (x);
951553af 7876 nonzero = GET_MODE_MASK (mode);
230d793d
RS
7877 mode_width = GET_MODE_BITSIZE (mode);
7878 }
7879
5f4f0e22 7880 if (mode_width > HOST_BITS_PER_WIDE_INT)
230d793d
RS
7881 /* Our only callers in this case look for single bit values. So
7882 just return the mode mask. Those tests will then be false. */
951553af 7883 return nonzero;
230d793d 7884
8baf60bb 7885#ifndef WORD_REGISTER_OPERATIONS
c6965c0f 7886 /* If MODE is wider than X, but both are a single word for both the host
663522cb 7887 and target machines, we can compute this from which bits of the
0840fd91
RK
7888 object might be nonzero in its own mode, taking into account the fact
7889 that on many CISC machines, accessing an object in a wider mode
7890 causes the high-order bits to become undefined. So they are
7891 not known to be zero. */
7892
7893 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7894 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7895 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
c6965c0f 7896 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
0840fd91
RK
7897 {
7898 nonzero &= nonzero_bits (x, GET_MODE (x));
663522cb 7899 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
0840fd91
RK
7900 return nonzero;
7901 }
7902#endif
7903
230d793d
RS
7904 code = GET_CODE (x);
7905 switch (code)
7906 {
7907 case REG:
6dd12198 7908#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
320dd7a7
RK
7909 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7910 all the bits above ptr_mode are known to be zero. */
7911 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3502dc9c 7912 && REG_POINTER (x))
320dd7a7
RK
7913 nonzero &= GET_MODE_MASK (ptr_mode);
7914#endif
7915
b0d71df9
RK
7916#ifdef STACK_BOUNDARY
7917 /* If this is the stack pointer, we may know something about its
7918 alignment. If PUSH_ROUNDING is defined, it is possible for the
230d793d
RS
7919 stack to be momentarily aligned only to that amount, so we pick
7920 the least alignment. */
7921
ee49a9c7
JW
7922 /* We can't check for arg_pointer_rtx here, because it is not
7923 guaranteed to have as much alignment as the stack pointer.
7924 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7925 alignment but the argument pointer has only 64 bit alignment. */
7926
0e9ff885
DM
7927 if ((x == frame_pointer_rtx
7928 || x == stack_pointer_rtx
7929 || x == hard_frame_pointer_rtx
7930 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7931 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
7932#ifdef STACK_BIAS
7933 && !STACK_BIAS
663522cb 7934#endif
0e9ff885 7935 )
230d793d 7936 {
b0d71df9 7937 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
230d793d
RS
7938
7939#ifdef PUSH_ROUNDING
f73ad30e 7940 if (REGNO (x) == STACK_POINTER_REGNUM && PUSH_ARGS)
b0d71df9 7941 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
230d793d
RS
7942#endif
7943
320dd7a7
RK
7944 /* We must return here, otherwise we may get a worse result from
7945 one of the choices below. There is nothing useful below as
7946 far as the stack pointer is concerned. */
663522cb 7947 return nonzero &= ~(sp_alignment - 1);
230d793d 7948 }
b0d71df9 7949#endif
230d793d 7950
55310dad
RK
7951 /* If X is a register whose nonzero bits value is current, use it.
7952 Otherwise, if X is a register whose value we can find, use that
7953 value. Otherwise, use the previously-computed global nonzero bits
7954 for this register. */
7955
7956 if (reg_last_set_value[REGNO (x)] != 0
7957 && reg_last_set_mode[REGNO (x)] == mode
57cf50a4
GRK
7958 && (reg_last_set_label[REGNO (x)] == label_tick
7959 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
7960 && REG_N_SETS (REGNO (x)) == 1
663522cb 7961 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
57cf50a4 7962 REGNO (x))))
55310dad
RK
7963 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7964 return reg_last_set_nonzero_bits[REGNO (x)];
230d793d
RS
7965
7966 tem = get_last_value (x);
9afa3d54 7967
230d793d 7968 if (tem)
9afa3d54
RK
7969 {
7970#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7971 /* If X is narrower than MODE and TEM is a non-negative
7972 constant that would appear negative in the mode of X,
7973 sign-extend it for use in reg_nonzero_bits because some
7974 machines (maybe most) will actually do the sign-extension
663522cb 7975 and this is the conservative approach.
9afa3d54
RK
7976
7977 ??? For 2.5, try to tighten up the MD files in this regard
7978 instead of this kludge. */
7979
7980 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7981 && GET_CODE (tem) == CONST_INT
7982 && INTVAL (tem) > 0
7983 && 0 != (INTVAL (tem)
7984 & ((HOST_WIDE_INT) 1
9e69be8c 7985 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
7986 tem = GEN_INT (INTVAL (tem)
7987 | ((HOST_WIDE_INT) (-1)
7988 << GET_MODE_BITSIZE (GET_MODE (x))));
7989#endif
7990 return nonzero_bits (tem, mode);
7991 }
951553af
RK
7992 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7993 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 7994 else
951553af 7995 return nonzero;
230d793d
RS
7996
7997 case CONST_INT:
9afa3d54
RK
7998#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7999 /* If X is negative in MODE, sign-extend the value. */
9e69be8c
RK
8000 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8001 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8002 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
9afa3d54
RK
8003#endif
8004
230d793d
RS
8005 return INTVAL (x);
8006
230d793d 8007 case MEM:
8baf60bb 8008#ifdef LOAD_EXTEND_OP
230d793d
RS
8009 /* In many, if not most, RISC machines, reading a byte from memory
8010 zeros the rest of the register. Noticing that fact saves a lot
8011 of extra zero-extends. */
8baf60bb
RK
8012 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8013 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d 8014#endif
8baf60bb 8015 break;
230d793d 8016
230d793d 8017 case EQ: case NE:
69bc0a1f
JH
8018 case UNEQ: case LTGT:
8019 case GT: case GTU: case UNGT:
8020 case LT: case LTU: case UNLT:
8021 case GE: case GEU: case UNGE:
8022 case LE: case LEU: case UNLE:
8023 case UNORDERED: case ORDERED:
3f508eca 8024
c6965c0f
RK
8025 /* If this produces an integer result, we know which bits are set.
8026 Code here used to clear bits outside the mode of X, but that is
8027 now done above. */
230d793d 8028
c6965c0f
RK
8029 if (GET_MODE_CLASS (mode) == MODE_INT
8030 && mode_width <= HOST_BITS_PER_WIDE_INT)
8031 nonzero = STORE_FLAG_VALUE;
230d793d 8032 break;
230d793d 8033
230d793d 8034 case NEG:
b3728b0e
JW
8035#if 0
8036 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8037 and num_sign_bit_copies. */
d0ab8cd3
RK
8038 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8039 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 8040 nonzero = 1;
b3728b0e 8041#endif
230d793d
RS
8042
8043 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
663522cb 8044 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
230d793d 8045 break;
d0ab8cd3
RK
8046
8047 case ABS:
b3728b0e
JW
8048#if 0
8049 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8050 and num_sign_bit_copies. */
d0ab8cd3
RK
8051 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8052 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 8053 nonzero = 1;
b3728b0e 8054#endif
d0ab8cd3 8055 break;
230d793d
RS
8056
8057 case TRUNCATE:
951553af 8058 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
230d793d
RS
8059 break;
8060
8061 case ZERO_EXTEND:
951553af 8062 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 8063 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 8064 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
230d793d
RS
8065 break;
8066
8067 case SIGN_EXTEND:
8068 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8069 Otherwise, show all the bits in the outer mode but not the inner
8070 may be non-zero. */
951553af 8071 inner_nz = nonzero_bits (XEXP (x, 0), mode);
230d793d
RS
8072 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8073 {
951553af 8074 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
e3da301d
MS
8075 if (inner_nz
8076 & (((HOST_WIDE_INT) 1
8077 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 8078 inner_nz |= (GET_MODE_MASK (mode)
663522cb 8079 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
230d793d
RS
8080 }
8081
951553af 8082 nonzero &= inner_nz;
230d793d
RS
8083 break;
8084
8085 case AND:
951553af
RK
8086 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8087 & nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
8088 break;
8089
d0ab8cd3
RK
8090 case XOR: case IOR:
8091 case UMIN: case UMAX: case SMIN: case SMAX:
951553af
RK
8092 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8093 | nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
8094 break;
8095
8096 case PLUS: case MINUS:
8097 case MULT:
8098 case DIV: case UDIV:
8099 case MOD: case UMOD:
8100 /* We can apply the rules of arithmetic to compute the number of
8101 high- and low-order zero bits of these operations. We start by
8102 computing the width (position of the highest-order non-zero bit)
8103 and the number of low-order zero bits for each value. */
8104 {
951553af
RK
8105 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
8106 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
8107 int width0 = floor_log2 (nz0) + 1;
8108 int width1 = floor_log2 (nz1) + 1;
8109 int low0 = floor_log2 (nz0 & -nz0);
8110 int low1 = floor_log2 (nz1 & -nz1);
318b149c
RK
8111 HOST_WIDE_INT op0_maybe_minusp
8112 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8113 HOST_WIDE_INT op1_maybe_minusp
8114 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
770ae6cc 8115 unsigned int result_width = mode_width;
230d793d
RS
8116 int result_low = 0;
8117
8118 switch (code)
8119 {
8120 case PLUS:
0e9ff885
DM
8121#ifdef STACK_BIAS
8122 if (STACK_BIAS
663522cb
KH
8123 && (XEXP (x, 0) == stack_pointer_rtx
8124 || XEXP (x, 0) == frame_pointer_rtx)
8125 && GET_CODE (XEXP (x, 1)) == CONST_INT)
0e9ff885
DM
8126 {
8127 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
8128
663522cb
KH
8129 nz0 = (GET_MODE_MASK (mode) & ~(sp_alignment - 1));
8130 nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
8131 width0 = floor_log2 (nz0) + 1;
8132 width1 = floor_log2 (nz1) + 1;
8133 low0 = floor_log2 (nz0 & -nz0);
8134 low1 = floor_log2 (nz1 & -nz1);
0e9ff885 8135 }
663522cb 8136#endif
230d793d
RS
8137 result_width = MAX (width0, width1) + 1;
8138 result_low = MIN (low0, low1);
8139 break;
8140 case MINUS:
8141 result_low = MIN (low0, low1);
8142 break;
8143 case MULT:
8144 result_width = width0 + width1;
8145 result_low = low0 + low1;
8146 break;
8147 case DIV:
2a8bb5cf
AH
8148 if (width1 == 0)
8149 break;
230d793d
RS
8150 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8151 result_width = width0;
8152 break;
8153 case UDIV:
2a8bb5cf
AH
8154 if (width1 == 0)
8155 break;
230d793d
RS
8156 result_width = width0;
8157 break;
8158 case MOD:
2a8bb5cf
AH
8159 if (width1 == 0)
8160 break;
230d793d
RS
8161 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8162 result_width = MIN (width0, width1);
8163 result_low = MIN (low0, low1);
8164 break;
8165 case UMOD:
2a8bb5cf
AH
8166 if (width1 == 0)
8167 break;
230d793d
RS
8168 result_width = MIN (width0, width1);
8169 result_low = MIN (low0, low1);
8170 break;
e9a25f70
JL
8171 default:
8172 abort ();
230d793d
RS
8173 }
8174
8175 if (result_width < mode_width)
951553af 8176 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
230d793d
RS
8177
8178 if (result_low > 0)
663522cb 8179 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
d1405722
RK
8180
8181#ifdef POINTERS_EXTEND_UNSIGNED
8182 /* If pointers extend unsigned and this is an addition or subtraction
8183 to a pointer in Pmode, all the bits above ptr_mode are known to be
8184 zero. */
6dd12198 8185 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
d1405722
RK
8186 && (code == PLUS || code == MINUS)
8187 && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8188 nonzero &= GET_MODE_MASK (ptr_mode);
8189#endif
230d793d
RS
8190 }
8191 break;
8192
8193 case ZERO_EXTRACT:
8194 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 8195 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 8196 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
230d793d
RS
8197 break;
8198
8199 case SUBREG:
c3c2cb37
RK
8200 /* If this is a SUBREG formed for a promoted variable that has
8201 been zero-extended, we know that at least the high-order bits
8202 are zero, though others might be too. */
8203
8204 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
951553af
RK
8205 nonzero = (GET_MODE_MASK (GET_MODE (x))
8206 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 8207
230d793d
RS
8208 /* If the inner mode is a single word for both the host and target
8209 machines, we can compute this from which bits of the inner
951553af 8210 object might be nonzero. */
230d793d 8211 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5f4f0e22
CH
8212 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8213 <= HOST_BITS_PER_WIDE_INT))
230d793d 8214 {
951553af 8215 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8baf60bb 8216
b52ce03d
R
8217#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8218 /* If this is a typical RISC machine, we only have to worry
8219 about the way loads are extended. */
8220 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
729a2125
RK
8221 ? (((nonzero
8222 & (((unsigned HOST_WIDE_INT) 1
8223 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8224 != 0))
b52ce03d 8225 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
230d793d 8226#endif
b52ce03d
R
8227 {
8228 /* On many CISC machines, accessing an object in a wider mode
8229 causes the high-order bits to become undefined. So they are
8230 not known to be zero. */
8231 if (GET_MODE_SIZE (GET_MODE (x))
8232 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8233 nonzero |= (GET_MODE_MASK (GET_MODE (x))
663522cb 8234 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
b52ce03d 8235 }
230d793d
RS
8236 }
8237 break;
8238
8239 case ASHIFTRT:
8240 case LSHIFTRT:
8241 case ASHIFT:
230d793d 8242 case ROTATE:
951553af 8243 /* The nonzero bits are in two classes: any bits within MODE
230d793d 8244 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 8245 nonzero bits are those that are significant in the operand of
230d793d
RS
8246 the shift when shifted the appropriate number of bits. This
8247 shows that high-order bits are cleared by the right shift and
8248 low-order bits by left shifts. */
8249 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8250 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 8251 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
8252 {
8253 enum machine_mode inner_mode = GET_MODE (x);
770ae6cc 8254 unsigned int width = GET_MODE_BITSIZE (inner_mode);
230d793d 8255 int count = INTVAL (XEXP (x, 1));
5f4f0e22 8256 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
951553af
RK
8257 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
8258 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 8259 unsigned HOST_WIDE_INT outer = 0;
230d793d
RS
8260
8261 if (mode_width > width)
663522cb 8262 outer = (op_nonzero & nonzero & ~mode_mask);
230d793d
RS
8263
8264 if (code == LSHIFTRT)
8265 inner >>= count;
8266 else if (code == ASHIFTRT)
8267 {
8268 inner >>= count;
8269
951553af 8270 /* If the sign bit may have been nonzero before the shift, we
230d793d 8271 need to mark all the places it could have been copied to
951553af 8272 by the shift as possibly nonzero. */
5f4f0e22
CH
8273 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8274 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d 8275 }
45620ed4 8276 else if (code == ASHIFT)
230d793d
RS
8277 inner <<= count;
8278 else
8279 inner = ((inner << (count % width)
8280 | (inner >> (width - (count % width)))) & mode_mask);
8281
951553af 8282 nonzero &= (outer | inner);
230d793d
RS
8283 }
8284 break;
8285
8286 case FFS:
8287 /* This is at most the number of bits in the mode. */
951553af 8288 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 8289 break;
d0ab8cd3
RK
8290
8291 case IF_THEN_ELSE:
951553af
RK
8292 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
8293 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 8294 break;
663522cb 8295
e9a25f70
JL
8296 default:
8297 break;
230d793d
RS
8298 }
8299
951553af 8300 return nonzero;
230d793d 8301}
b3728b0e
JW
8302
8303/* See the macro definition above. */
8304#undef num_sign_bit_copies
230d793d 8305\f
/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static unsigned int
num_sign_bit_copies (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);
  unsigned int bitwidth;
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;
  rtx tem;

  /* If we weren't given a mode, use the mode of X.  If the mode is still
     VOIDmode, we don't know anything.  Likewise if one of the modes is
     floating-point.  */

  if (mode == VOIDmode)
    mode = GET_MODE (x);

  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
    return 1;

  bitwidth = GET_MODE_BITSIZE (mode);

  /* For a smaller object, just ignore the high bits.  */
  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
    {
      num0 = num_sign_bit_copies (x, GET_MODE (x));
      return MAX (1,
		  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
    }

  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
    {
#ifndef WORD_REGISTER_OPERATIONS
  /* If this machine does not do all register operations on the entire
     register and MODE is wider than the mode of X, we can say nothing
     at all about the high-order bits.  */
      return 1;
#else
      /* Likewise on machines that do, if the mode of the object is smaller
	 than a word and loads of that size don't sign extend, we can say
	 nothing about the high order bits.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
#ifdef LOAD_EXTEND_OP
	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
#endif
	  )
	return 1;
#endif
    }

  switch (code)
    {
    case REG:

#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend signed and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
	  && REG_POINTER (x))
	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
#endif

      /* Use the recorded value from the most recent SET of this register,
	 provided that value was recorded in the mode we want and is still
	 valid at the insn being substituted into (see reg_last_set_*).  */
      if (reg_last_set_value[REGNO (x)] != 0
	  && reg_last_set_mode[REGNO (x)] == mode
	  && (reg_last_set_label[REGNO (x)] == label_tick
	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
		  && REG_N_SETS (REGNO (x)) == 1
		  && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
					REGNO (x))))
	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
	return reg_last_set_sign_bit_copies[REGNO (x)];

      tem = get_last_value (x);
      if (tem != 0)
	return num_sign_bit_copies (tem, mode);

      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
	return reg_sign_bit_copies[REGNO (x)];
      break;

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
	return MAX (1, ((int) bitwidth
			- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
#endif
      break;

    case CONST_INT:
      /* If the constant is negative, take its 1's complement and remask.
	 Then see how many zero bits we have.  */
      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
	 and we are looking at it in a wider mode, we know that at least the
	 high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
	{
	  num0 = num_sign_bit_copies (SUBREG_REG (x), mode);
	  return MAX ((int) bitwidth
		      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
		      num0);
	}

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
	{
	  num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
	  return MAX (1, (num0
			  - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
				   - bitwidth)));
	}

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
	 affect the entire register, just look inside.  Note that we are
	 passing MODE to the recursive call, so the number of sign bit copies
	 will remain relative to that mode, not the inner mode.  */

      /* This works only if loads sign extend.  Otherwise, if we get a
	 reload for the inner part, it may be loaded from the stack, and
	 then we lose all sign bit copies that existed before the store
	 to the stack.  */

      if ((GET_MODE_SIZE (GET_MODE (x))
	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
	return num_sign_bit_copies (SUBREG_REG (x), mode);
#endif
#endif
      break;

    case SIGN_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	      + num_sign_bit_copies (XEXP (x, 0), VOIDmode));

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
				    - bitwidth)));

    case NOT:
      /* Complementing does not change the number of sign bit copies.  */
      return num_sign_bit_copies (XEXP (x, 0), mode);

    case ROTATE:  case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
	 of sign bit copies, we can just subtract that amount from the
	 number.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
	{
	  num0 = num_sign_bit_copies (XEXP (x, 0), mode);
	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
	}
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
	 is known to be positive, the number of sign bit copies is the
	 same as that of the input.  Finally, if the input has just one bit
	 that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
	return bitwidth;

      if (num0 > 1
	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
	num0--;

      return num0;

    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
	 MIN and MAX operations always return one of the operands.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
      return MIN (num0, num1);

    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
	 if we are subtracting 1 from a positive number, there will not
	 be such a carry.  Furthermore, if the positive number is known to
	 be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
	{
	  nonzero = nonzero_bits (XEXP (x, 0), mode);
	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
	    return (nonzero == 1 || nonzero == 0 ? bitwidth
		    : bitwidth - floor_log2 (nonzero) - 1);
	}

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
      result = MAX (1, MIN (num0, num1) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is an addition or subtraction
	 to a pointer in Pmode, all the bits above ptr_mode are known to be
	 sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
	  && (code == PLUS || code == MINUS)
	  && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
	result = MAX ((GET_MODE_BITSIZE (Pmode)
		       - GET_MODE_BITSIZE (ptr_mode) + 1),
		      result);
#endif
      return result;

    case MULT:
      /* The number of bits of the product is the sum of the number of
	 bits of both terms.  However, unless one of the terms is known
	 to be positive, we must allow for an additional bit since negating
	 a negative number can remove one sign bit copy.  */

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (((nonzero_bits (XEXP (x, 0), mode)
		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
		  && ((nonzero_bits (XEXP (x, 1), mode)
		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
	result--;

      return MAX (1, result);

    case UDIV:
      /* The result must be <= the first operand.  If the first operand
	 has the high bit set, we know nothing about the number of sign
	 bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	return 1;
      else
	return num_sign_bit_copies (XEXP (x, 0), mode);

    case UMOD:
      /* The result must be <= the second operand.  */
      return num_sign_bit_copies (XEXP (x, 1), mode);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
	 the case where the divisor is negative, in which case we have
	 to add 1.  */
      result = num_sign_bit_copies (XEXP (x, 0), mode);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
	result--;

      return result;

    case MOD:
      result = num_sign_bit_copies (XEXP (x, 1), mode);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
	result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
	 sign bit.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) > 0)
	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || INTVAL (XEXP (x, 1)) < 0
	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
	return 1;

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      num0 = num_sign_bit_copies (XEXP (x, 1), mode);
      num1 = num_sign_bit_copies (XEXP (x, 2), mode);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    case GEU:  case GTU:  case LEU:  case LTU:
    case UNORDERED: case ORDERED:
      /* A comparison yields STORE_FLAG_VALUE.  If that value is negative,
	 take its 1's complement and remask.  Then see how many zero bits
	 we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
      break;

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return 1 (the
     conservative answer).  */

  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
}
8658\f
1a26b032
RK
8659/* Return the number of "extended" bits there are in X, when interpreted
8660 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8661 unsigned quantities, this is the number of high-order zero bits.
8662 For signed quantities, this is the number of copies of the sign bit
8663 minus 1. In both case, this function returns the number of "spare"
8664 bits. For example, if two quantities for which this function returns
8665 at least 1 are added, the addition is known not to overflow.
8666
8667 This function will always return 0 unless called during combine, which
8668 implies that it must be called from a define_split. */
8669
770ae6cc 8670unsigned int
1a26b032
RK
8671extended_count (x, mode, unsignedp)
8672 rtx x;
8673 enum machine_mode mode;
8674 int unsignedp;
8675{
951553af 8676 if (nonzero_sign_valid == 0)
1a26b032
RK
8677 return 0;
8678
8679 return (unsignedp
ac49a949 8680 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
770ae6cc
RK
8681 ? (GET_MODE_BITSIZE (mode) - 1
8682 - floor_log2 (nonzero_bits (x, mode)))
8683 : 0)
1a26b032
RK
8684 : num_sign_bit_copies (x, mode) - 1);
8685}
8686\f
230d793d
RS
8687/* This function is called from `simplify_shift_const' to merge two
8688 outer operations. Specifically, we have already found that we need
8689 to perform operation *POP0 with constant *PCONST0 at the outermost
8690 position. We would now like to also perform OP1 with constant CONST1
8691 (with *POP0 being done last).
8692
8693 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
663522cb 8694 the resulting operation. *PCOMP_P is set to 1 if we would need to
230d793d
RS
8695 complement the innermost operand, otherwise it is unchanged.
8696
8697 MODE is the mode in which the operation will be done. No bits outside
8698 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 8699 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
8700
8701 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
8702 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8703 result is simply *PCONST0.
8704
8705 If the resulting operation cannot be expressed as one operation, we
8706 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8707
8708static int
8709merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8710 enum rtx_code *pop0;
5f4f0e22 8711 HOST_WIDE_INT *pconst0;
230d793d 8712 enum rtx_code op1;
5f4f0e22 8713 HOST_WIDE_INT const1;
230d793d
RS
8714 enum machine_mode mode;
8715 int *pcomp_p;
8716{
8717 enum rtx_code op0 = *pop0;
5f4f0e22 8718 HOST_WIDE_INT const0 = *pconst0;
230d793d
RS
8719
8720 const0 &= GET_MODE_MASK (mode);
8721 const1 &= GET_MODE_MASK (mode);
8722
8723 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8724 if (op0 == AND)
8725 const1 &= const0;
8726
8727 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8728 if OP0 is SET. */
8729
8730 if (op1 == NIL || op0 == SET)
8731 return 1;
8732
8733 else if (op0 == NIL)
8734 op0 = op1, const0 = const1;
8735
8736 else if (op0 == op1)
8737 {
8738 switch (op0)
8739 {
8740 case AND:
8741 const0 &= const1;
8742 break;
8743 case IOR:
8744 const0 |= const1;
8745 break;
8746 case XOR:
8747 const0 ^= const1;
8748 break;
8749 case PLUS:
8750 const0 += const1;
8751 break;
8752 case NEG:
8753 op0 = NIL;
8754 break;
e9a25f70
JL
8755 default:
8756 break;
230d793d
RS
8757 }
8758 }
8759
8760 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8761 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8762 return 0;
8763
8764 /* If the two constants aren't the same, we can't do anything. The
8765 remaining six cases can all be done. */
8766 else if (const0 != const1)
8767 return 0;
8768
8769 else
8770 switch (op0)
8771 {
8772 case IOR:
8773 if (op1 == AND)
8774 /* (a & b) | b == b */
8775 op0 = SET;
8776 else /* op1 == XOR */
8777 /* (a ^ b) | b == a | b */
b729186a 8778 {;}
230d793d
RS
8779 break;
8780
8781 case XOR:
8782 if (op1 == AND)
8783 /* (a & b) ^ b == (~a) & b */
8784 op0 = AND, *pcomp_p = 1;
8785 else /* op1 == IOR */
8786 /* (a | b) ^ b == a & ~b */
663522cb 8787 op0 = AND, *pconst0 = ~const0;
230d793d
RS
8788 break;
8789
8790 case AND:
8791 if (op1 == IOR)
8792 /* (a | b) & b == b */
8793 op0 = SET;
8794 else /* op1 == XOR */
8795 /* (a ^ b) & b) == (~a) & b */
8796 *pcomp_p = 1;
8797 break;
e9a25f70
JL
8798 default:
8799 break;
230d793d
RS
8800 }
8801
8802 /* Check for NO-OP cases. */
8803 const0 &= GET_MODE_MASK (mode);
8804 if (const0 == 0
8805 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8806 op0 = NIL;
8807 else if (const0 == 0 && op0 == AND)
8808 op0 = SET;
e51712db
KG
8809 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8810 && op0 == AND)
230d793d
RS
8811 op0 = NIL;
8812
7e4ce834
RH
8813 /* ??? Slightly redundant with the above mask, but not entirely.
8814 Moving this above means we'd have to sign-extend the mode mask
8815 for the final test. */
8816 const0 = trunc_int_for_mode (const0, mode);
9fa6d012 8817
230d793d
RS
8818 *pop0 = op0;
8819 *pconst0 = const0;
8820
8821 return 1;
8822}
8823\f
8824/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8825 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8826 that we started with.
8827
8828 The shift is normally computed in the widest mode we find in VAROP, as
8829 long as it isn't a different number of words than RESULT_MODE. Exceptions
8830 are ASHIFTRT and ROTATE, which are always done in their original mode, */
8831
8832static rtx
770ae6cc 8833simplify_shift_const (x, code, result_mode, varop, input_count)
230d793d
RS
8834 rtx x;
8835 enum rtx_code code;
8836 enum machine_mode result_mode;
8837 rtx varop;
770ae6cc 8838 int input_count;
230d793d
RS
8839{
8840 enum rtx_code orig_code = code;
770ae6cc
RK
8841 int orig_count = input_count;
8842 unsigned int count;
8843 int signed_count;
230d793d
RS
8844 enum machine_mode mode = result_mode;
8845 enum machine_mode shift_mode, tmode;
770ae6cc 8846 unsigned int mode_words
230d793d
RS
8847 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8848 /* We form (outer_op (code varop count) (outer_const)). */
8849 enum rtx_code outer_op = NIL;
c4e861e8 8850 HOST_WIDE_INT outer_const = 0;
230d793d
RS
8851 rtx const_rtx;
8852 int complement_p = 0;
8853 rtx new;
8854
8855 /* If we were given an invalid count, don't do anything except exactly
8856 what was requested. */
8857
770ae6cc 8858 if (input_count < 0 || input_count > (int) GET_MODE_BITSIZE (mode))
230d793d
RS
8859 {
8860 if (x)
8861 return x;
8862
770ae6cc 8863 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (input_count));
230d793d
RS
8864 }
8865
770ae6cc
RK
8866 count = input_count;
8867
853d8828
RH
8868 /* Make sure and truncate the "natural" shift on the way in. We don't
8869 want to do this inside the loop as it makes it more difficult to
8870 combine shifts. */
8871#ifdef SHIFT_COUNT_TRUNCATED
8872 if (SHIFT_COUNT_TRUNCATED)
8873 count %= GET_MODE_BITSIZE (mode);
8874#endif
8875
230d793d
RS
8876 /* Unless one of the branches of the `if' in this loop does a `continue',
8877 we will `break' the loop after the `if'. */
8878
8879 while (count != 0)
8880 {
8881 /* If we have an operand of (clobber (const_int 0)), just return that
8882 value. */
8883 if (GET_CODE (varop) == CLOBBER)
8884 return varop;
8885
8886 /* If we discovered we had to complement VAROP, leave. Making a NOT
8887 here would cause an infinite loop. */
8888 if (complement_p)
8889 break;
8890
abc95ed3 8891 /* Convert ROTATERT to ROTATE. */
230d793d
RS
8892 if (code == ROTATERT)
8893 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8894
230d793d 8895 /* We need to determine what mode we will do the shift in. If the
f6789c77
RK
8896 shift is a right shift or a ROTATE, we must always do it in the mode
8897 it was originally done in. Otherwise, we can do it in MODE, the
0f41302f 8898 widest mode encountered. */
f6789c77
RK
8899 shift_mode
8900 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8901 ? result_mode : mode);
230d793d
RS
8902
8903 /* Handle cases where the count is greater than the size of the mode
853d8828
RH
8904 minus 1. For ASHIFT, use the size minus one as the count (this can
8905 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8906 take the count modulo the size. For other shifts, the result is
8907 zero.
230d793d
RS
8908
8909 Since these shifts are being produced by the compiler by combining
8910 multiple operations, each of which are defined, we know what the
8911 result is supposed to be. */
663522cb 8912
230d793d
RS
8913 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8914 {
8915 if (code == ASHIFTRT)
8916 count = GET_MODE_BITSIZE (shift_mode) - 1;
8917 else if (code == ROTATE || code == ROTATERT)
8918 count %= GET_MODE_BITSIZE (shift_mode);
8919 else
8920 {
8921 /* We can't simply return zero because there may be an
8922 outer op. */
8923 varop = const0_rtx;
8924 count = 0;
8925 break;
8926 }
8927 }
8928
312def2e
RK
8929 /* An arithmetic right shift of a quantity known to be -1 or 0
8930 is a no-op. */
8931 if (code == ASHIFTRT
8932 && (num_sign_bit_copies (varop, shift_mode)
8933 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 8934 {
312def2e
RK
8935 count = 0;
8936 break;
8937 }
d0ab8cd3 8938
312def2e
RK
8939 /* If we are doing an arithmetic right shift and discarding all but
8940 the sign bit copies, this is equivalent to doing a shift by the
8941 bitsize minus one. Convert it into that shift because it will often
8942 allow other simplifications. */
500c518b 8943
312def2e
RK
8944 if (code == ASHIFTRT
8945 && (count + num_sign_bit_copies (varop, shift_mode)
8946 >= GET_MODE_BITSIZE (shift_mode)))
8947 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 8948
230d793d
RS
8949 /* We simplify the tests below and elsewhere by converting
8950 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8951 `make_compound_operation' will convert it to a ASHIFTRT for
8952 those machines (such as Vax) that don't have a LSHIFTRT. */
5f4f0e22 8953 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8954 && code == ASHIFTRT
951553af 8955 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
8956 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8957 == 0))
230d793d
RS
8958 code = LSHIFTRT;
8959
8960 switch (GET_CODE (varop))
8961 {
8962 case SIGN_EXTEND:
8963 case ZERO_EXTEND:
8964 case SIGN_EXTRACT:
8965 case ZERO_EXTRACT:
8966 new = expand_compound_operation (varop);
8967 if (new != varop)
8968 {
8969 varop = new;
8970 continue;
8971 }
8972 break;
8973
8974 case MEM:
8975 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8976 minus the width of a smaller mode, we can do this with a
8977 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8978 if ((code == ASHIFTRT || code == LSHIFTRT)
8979 && ! mode_dependent_address_p (XEXP (varop, 0))
8980 && ! MEM_VOLATILE_P (varop)
8981 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8982 MODE_INT, 1)) != BLKmode)
8983 {
f1ec5147
RK
8984 new = adjust_address_nv (varop, tmode,
8985 BYTES_BIG_ENDIAN ? 0
8986 : count / BITS_PER_UNIT);
bf49b139 8987
f1c6ba8b
RK
8988 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
8989 : ZERO_EXTEND, mode, new);
230d793d
RS
8990 count = 0;
8991 continue;
8992 }
8993 break;
8994
8995 case USE:
8996 /* Similar to the case above, except that we can only do this if
8997 the resulting mode is the same as that of the underlying
8998 MEM and adjust the address depending on the *bits* endianness
8999 because of the way that bit-field extract insns are defined. */
9000 if ((code == ASHIFTRT || code == LSHIFTRT)
9001 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9002 MODE_INT, 1)) != BLKmode
9003 && tmode == GET_MODE (XEXP (varop, 0)))
9004 {
f76b9db2
ILT
9005 if (BITS_BIG_ENDIAN)
9006 new = XEXP (varop, 0);
9007 else
9008 {
9009 new = copy_rtx (XEXP (varop, 0));
663522cb 9010 SUBST (XEXP (new, 0),
f76b9db2
ILT
9011 plus_constant (XEXP (new, 0),
9012 count / BITS_PER_UNIT));
9013 }
230d793d 9014
f1c6ba8b
RK
9015 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9016 : ZERO_EXTEND, mode, new);
230d793d
RS
9017 count = 0;
9018 continue;
9019 }
9020 break;
9021
9022 case SUBREG:
9023 /* If VAROP is a SUBREG, strip it as long as the inner operand has
9024 the same number of words as what we've seen so far. Then store
9025 the widest mode in MODE. */
f9e67232
RS
9026 if (subreg_lowpart_p (varop)
9027 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9028 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
9029 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9030 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9031 == mode_words))
9032 {
9033 varop = SUBREG_REG (varop);
9034 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9035 mode = GET_MODE (varop);
9036 continue;
9037 }
9038 break;
9039
9040 case MULT:
9041 /* Some machines use MULT instead of ASHIFT because MULT
9042 is cheaper. But it is still better on those machines to
9043 merge two shifts into one. */
9044 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9045 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9046 {
770ae6cc
RK
9047 varop
9048 = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9049 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
9050 continue;
9051 }
9052 break;
9053
9054 case UDIV:
9055 /* Similar, for when divides are cheaper. */
9056 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9057 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9058 {
770ae6cc
RK
9059 varop
9060 = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9061 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
9062 continue;
9063 }
9064 break;
9065
9066 case ASHIFTRT:
8f8d8d6e
AO
9067 /* If we are extracting just the sign bit of an arithmetic
9068 right shift, that shift is not needed. However, the sign
9069 bit of a wider mode may be different from what would be
9070 interpreted as the sign bit in a narrower mode, so, if
9071 the result is narrower, don't discard the shift. */
9072 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9073 && (GET_MODE_BITSIZE (result_mode)
9074 >= GET_MODE_BITSIZE (GET_MODE (varop))))
230d793d
RS
9075 {
9076 varop = XEXP (varop, 0);
9077 continue;
9078 }
9079
0f41302f 9080 /* ... fall through ... */
230d793d
RS
9081
9082 case LSHIFTRT:
9083 case ASHIFT:
230d793d
RS
9084 case ROTATE:
9085 /* Here we have two nested shifts. The result is usually the
9086 AND of a new shift with a mask. We compute the result below. */
9087 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9088 && INTVAL (XEXP (varop, 1)) >= 0
9089 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
9090 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9091 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
9092 {
9093 enum rtx_code first_code = GET_CODE (varop);
770ae6cc 9094 unsigned int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 9095 unsigned HOST_WIDE_INT mask;
230d793d 9096 rtx mask_rtx;
230d793d 9097
230d793d
RS
9098 /* We have one common special case. We can't do any merging if
9099 the inner code is an ASHIFTRT of a smaller mode. However, if
9100 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9101 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9102 we can convert it to
9103 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
9104 This simplifies certain SIGN_EXTEND operations. */
9105 if (code == ASHIFT && first_code == ASHIFTRT
9106 && (GET_MODE_BITSIZE (result_mode)
9107 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
9108 {
9109 /* C3 has the low-order C1 bits zero. */
663522cb 9110
5f4f0e22 9111 mask = (GET_MODE_MASK (mode)
663522cb 9112 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 9113
5f4f0e22 9114 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 9115 XEXP (varop, 0), mask);
5f4f0e22 9116 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
9117 varop, count);
9118 count = first_count;
9119 code = ASHIFTRT;
9120 continue;
9121 }
663522cb 9122
d0ab8cd3
RK
9123 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9124 than C1 high-order bits equal to the sign bit, we can convert
9125 this to either an ASHIFT or a ASHIFTRT depending on the
663522cb 9126 two counts.
230d793d
RS
9127
9128 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9129
9130 if (code == ASHIFTRT && first_code == ASHIFT
9131 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
9132 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9133 > first_count))
230d793d 9134 {
d0ab8cd3 9135 varop = XEXP (varop, 0);
770ae6cc
RK
9136
9137 signed_count = count - first_count;
9138 if (signed_count < 0)
663522cb 9139 count = -signed_count, code = ASHIFT;
770ae6cc
RK
9140 else
9141 count = signed_count;
9142
d0ab8cd3 9143 continue;
230d793d
RS
9144 }
9145
9146 /* There are some cases we can't do. If CODE is ASHIFTRT,
9147 we can only do this if FIRST_CODE is also ASHIFTRT.
9148
9149 We can't do the case when CODE is ROTATE and FIRST_CODE is
9150 ASHIFTRT.
9151
9152 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 9153 we can't do this if either shift is a right shift or ROTATE.
230d793d
RS
9154
9155 Finally, we can't do any of these if the mode is too wide
9156 unless the codes are the same.
9157
9158 Handle the case where the shift codes are the same
9159 first. */
9160
9161 if (code == first_code)
9162 {
9163 if (GET_MODE (varop) != result_mode
bdaae9a0
RK
9164 && (code == ASHIFTRT || code == LSHIFTRT
9165 || code == ROTATE))
230d793d
RS
9166 break;
9167
9168 count += first_count;
9169 varop = XEXP (varop, 0);
9170 continue;
9171 }
9172
9173 if (code == ASHIFTRT
9174 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 9175 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 9176 || (GET_MODE (varop) != result_mode
bdaae9a0
RK
9177 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9178 || first_code == ROTATE
230d793d
RS
9179 || code == ROTATE)))
9180 break;
9181
9182 /* To compute the mask to apply after the shift, shift the
663522cb 9183 nonzero bits of the inner shift the same way the
230d793d
RS
9184 outer shift will. */
9185
951553af 9186 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
9187
9188 mask_rtx
9189 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 9190 GEN_INT (count));
663522cb 9191
230d793d
RS
9192 /* Give up if we can't compute an outer operation to use. */
9193 if (mask_rtx == 0
9194 || GET_CODE (mask_rtx) != CONST_INT
9195 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9196 INTVAL (mask_rtx),
9197 result_mode, &complement_p))
9198 break;
9199
9200 /* If the shifts are in the same direction, we add the
9201 counts. Otherwise, we subtract them. */
770ae6cc 9202 signed_count = count;
230d793d
RS
9203 if ((code == ASHIFTRT || code == LSHIFTRT)
9204 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
770ae6cc 9205 signed_count += first_count;
230d793d 9206 else
770ae6cc 9207 signed_count -= first_count;
230d793d 9208
663522cb 9209 /* If COUNT is positive, the new shift is usually CODE,
230d793d
RS
9210 except for the two exceptions below, in which case it is
9211 FIRST_CODE. If the count is negative, FIRST_CODE should
9212 always be used */
770ae6cc 9213 if (signed_count > 0
230d793d
RS
9214 && ((first_code == ROTATE && code == ASHIFT)
9215 || (first_code == ASHIFTRT && code == LSHIFTRT)))
770ae6cc
RK
9216 code = first_code, count = signed_count;
9217 else if (signed_count < 0)
663522cb 9218 code = first_code, count = -signed_count;
770ae6cc
RK
9219 else
9220 count = signed_count;
230d793d
RS
9221
9222 varop = XEXP (varop, 0);
9223 continue;
9224 }
9225
9226 /* If we have (A << B << C) for any shift, we can convert this to
9227 (A << C << B). This wins if A is a constant. Only try this if
9228 B is not a constant. */
9229
9230 else if (GET_CODE (varop) == code
9231 && GET_CODE (XEXP (varop, 1)) != CONST_INT
9232 && 0 != (new
9233 = simplify_binary_operation (code, mode,
9234 XEXP (varop, 0),
5f4f0e22 9235 GEN_INT (count))))
230d793d 9236 {
f1c6ba8b 9237 varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
230d793d
RS
9238 count = 0;
9239 continue;
9240 }
9241 break;
9242
9243 case NOT:
9244 /* Make this fit the case below. */
f1c6ba8b
RK
9245 varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9246 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
9247 continue;
9248
9249 case IOR:
9250 case AND:
9251 case XOR:
9252 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9253 with C the size of VAROP - 1 and the shift is logical if
9254 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9255 we have an (le X 0) operation. If we have an arithmetic shift
9256 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9257 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9258
9259 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9260 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9261 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9262 && (code == LSHIFTRT || code == ASHIFTRT)
9263 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9264 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9265 {
9266 count = 0;
f1c6ba8b
RK
9267 varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9268 const0_rtx);
230d793d
RS
9269
9270 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
f1c6ba8b 9271 varop = gen_rtx_NEG (GET_MODE (varop), varop);
230d793d
RS
9272
9273 continue;
9274 }
9275
9276 /* If we have (shift (logical)), move the logical to the outside
9277 to allow it to possibly combine with another logical and the
9278 shift to combine with another shift. This also canonicalizes to
9279 what a ZERO_EXTRACT looks like. Also, some machines have
9280 (and (shift)) insns. */
9281
9282 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9283 && (new = simplify_binary_operation (code, result_mode,
9284 XEXP (varop, 1),
5f4f0e22 9285 GEN_INT (count))) != 0
663522cb 9286 && GET_CODE (new) == CONST_INT
230d793d
RS
9287 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9288 INTVAL (new), result_mode, &complement_p))
9289 {
9290 varop = XEXP (varop, 0);
9291 continue;
9292 }
9293
9294 /* If we can't do that, try to simplify the shift in each arm of the
9295 logical expression, make a new logical expression, and apply
9296 the inverse distributive law. */
9297 {
00d4ca1c 9298 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 9299 XEXP (varop, 0), count);
00d4ca1c 9300 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
9301 XEXP (varop, 1), count);
9302
21a64bf1 9303 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
230d793d
RS
9304 varop = apply_distributive_law (varop);
9305
9306 count = 0;
9307 }
9308 break;
9309
9310 case EQ:
45620ed4 9311 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 9312 says that the sign bit can be tested, FOO has mode MODE, C is
45620ed4
RK
9313 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9314 that may be nonzero. */
9315 if (code == LSHIFTRT
230d793d
RS
9316 && XEXP (varop, 1) == const0_rtx
9317 && GET_MODE (XEXP (varop, 0)) == result_mode
9318 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 9319 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 9320 && ((STORE_FLAG_VALUE
663522cb 9321 & ((HOST_WIDE_INT) 1
770ae6cc 9322 < (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 9323 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
9324 && merge_outer_ops (&outer_op, &outer_const, XOR,
9325 (HOST_WIDE_INT) 1, result_mode,
9326 &complement_p))
230d793d
RS
9327 {
9328 varop = XEXP (varop, 0);
9329 count = 0;
9330 continue;
9331 }
9332 break;
9333
9334 case NEG:
d0ab8cd3
RK
9335 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9336 than the number of bits in the mode is equivalent to A. */
9337 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 9338 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 9339 {
d0ab8cd3 9340 varop = XEXP (varop, 0);
230d793d
RS
9341 count = 0;
9342 continue;
9343 }
9344
9345 /* NEG commutes with ASHIFT since it is multiplication. Move the
9346 NEG outside to allow shifts to combine. */
9347 if (code == ASHIFT
5f4f0e22
CH
9348 && merge_outer_ops (&outer_op, &outer_const, NEG,
9349 (HOST_WIDE_INT) 0, result_mode,
9350 &complement_p))
230d793d
RS
9351 {
9352 varop = XEXP (varop, 0);
9353 continue;
9354 }
9355 break;
9356
9357 case PLUS:
d0ab8cd3
RK
9358 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9359 is one less than the number of bits in the mode is
9360 equivalent to (xor A 1). */
230d793d
RS
9361 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9362 && XEXP (varop, 1) == constm1_rtx
951553af 9363 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
9364 && merge_outer_ops (&outer_op, &outer_const, XOR,
9365 (HOST_WIDE_INT) 1, result_mode,
9366 &complement_p))
230d793d
RS
9367 {
9368 count = 0;
9369 varop = XEXP (varop, 0);
9370 continue;
9371 }
9372
3f508eca 9373 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 9374 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
9375 bits are known zero in FOO, we can replace the PLUS with FOO.
9376 Similarly in the other operand order. This code occurs when
9377 we are computing the size of a variable-size array. */
9378
9379 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 9380 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
9381 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9382 && (nonzero_bits (XEXP (varop, 1), result_mode)
9383 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
9384 {
9385 varop = XEXP (varop, 0);
9386 continue;
9387 }
9388 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 9389 && count < HOST_BITS_PER_WIDE_INT
ac49a949 9390 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 9391 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 9392 >> count)
951553af
RK
9393 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9394 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
9395 result_mode)))
9396 {
9397 varop = XEXP (varop, 1);
9398 continue;
9399 }
9400
230d793d
RS
9401 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9402 if (code == ASHIFT
9403 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9404 && (new = simplify_binary_operation (ASHIFT, result_mode,
9405 XEXP (varop, 1),
5f4f0e22 9406 GEN_INT (count))) != 0
770ae6cc 9407 && GET_CODE (new) == CONST_INT
230d793d
RS
9408 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9409 INTVAL (new), result_mode, &complement_p))
9410 {
9411 varop = XEXP (varop, 0);
9412 continue;
9413 }
9414 break;
9415
9416 case MINUS:
9417 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
9418 with C the size of VAROP - 1 and the shift is logical if
9419 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9420 we have a (gt X 0) operation. If the shift is arithmetic with
9421 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9422 we have a (neg (gt X 0)) operation. */
9423
0802d516
RK
9424 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9425 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
230d793d 9426 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
230d793d
RS
9427 && (code == LSHIFTRT || code == ASHIFTRT)
9428 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9429 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9430 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9431 {
9432 count = 0;
f1c6ba8b
RK
9433 varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9434 const0_rtx);
230d793d
RS
9435
9436 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
f1c6ba8b 9437 varop = gen_rtx_NEG (GET_MODE (varop), varop);
230d793d
RS
9438
9439 continue;
9440 }
9441 break;
6e0ef100
JC
9442
9443 case TRUNCATE:
9444 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9445 if the truncate does not affect the value. */
9446 if (code == LSHIFTRT
9447 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9448 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9449 && (INTVAL (XEXP (XEXP (varop, 0), 1))
b577a8ff
JL
9450 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9451 - GET_MODE_BITSIZE (GET_MODE (varop)))))
6e0ef100
JC
9452 {
9453 rtx varop_inner = XEXP (varop, 0);
9454
770ae6cc 9455 varop_inner
f1c6ba8b
RK
9456 = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9457 XEXP (varop_inner, 0),
9458 GEN_INT
9459 (count + INTVAL (XEXP (varop_inner, 1))));
9460 varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
6e0ef100
JC
9461 count = 0;
9462 continue;
9463 }
9464 break;
663522cb 9465
e9a25f70
JL
9466 default:
9467 break;
230d793d
RS
9468 }
9469
9470 break;
9471 }
9472
9473 /* We need to determine what mode to do the shift in. If the shift is
f6789c77
RK
9474 a right shift or ROTATE, we must always do it in the mode it was
9475 originally done in. Otherwise, we can do it in MODE, the widest mode
9476 encountered. The code we care about is that of the shift that will
9477 actually be done, not the shift that was originally requested. */
9478 shift_mode
9479 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9480 ? result_mode : mode);
230d793d
RS
9481
9482 /* We have now finished analyzing the shift. The result should be
9483 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9484 OUTER_OP is non-NIL, it is an operation that needs to be applied
9485 to the result of the shift. OUTER_CONST is the relevant constant,
9486 but we must turn off all bits turned off in the shift.
9487
9488 If we were passed a value for X, see if we can use any pieces of
9489 it. If not, make new rtx. */
9490
9491 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9492 && GET_CODE (XEXP (x, 1)) == CONST_INT
9493 && INTVAL (XEXP (x, 1)) == count)
9494 const_rtx = XEXP (x, 1);
9495 else
5f4f0e22 9496 const_rtx = GEN_INT (count);
230d793d
RS
9497
9498 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9499 && GET_MODE (XEXP (x, 0)) == shift_mode
9500 && SUBREG_REG (XEXP (x, 0)) == varop)
9501 varop = XEXP (x, 0);
9502 else if (GET_MODE (varop) != shift_mode)
9503 varop = gen_lowpart_for_combine (shift_mode, varop);
9504
0f41302f 9505 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
9506 if (GET_CODE (varop) == CLOBBER)
9507 return x ? x : varop;
9508
9509 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9510 if (new != 0)
9511 x = new;
9512 else
9513 {
9514 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
f1c6ba8b 9515 x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
230d793d
RS
9516
9517 SUBST (XEXP (x, 0), varop);
9518 SUBST (XEXP (x, 1), const_rtx);
9519 }
9520
224eeff2
RK
9521 /* If we have an outer operation and we just made a shift, it is
9522 possible that we could have simplified the shift were it not
9523 for the outer operation. So try to do the simplification
9524 recursively. */
9525
9526 if (outer_op != NIL && GET_CODE (x) == code
9527 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9528 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9529 INTVAL (XEXP (x, 1)));
9530
230d793d
RS
9531 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9532 turn off all the bits that the shift would have turned off. */
9533 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 9534 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d 9535 GET_MODE_MASK (result_mode) >> orig_count);
663522cb 9536
230d793d
RS
9537 /* Do the remainder of the processing in RESULT_MODE. */
9538 x = gen_lowpart_for_combine (result_mode, x);
9539
9540 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9541 operation. */
9542 if (complement_p)
f1c6ba8b 9543 x =simplify_gen_unary (NOT, result_mode, x, result_mode);
230d793d
RS
9544
9545 if (outer_op != NIL)
9546 {
5f4f0e22 9547 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7e4ce834 9548 outer_const = trunc_int_for_mode (outer_const, result_mode);
230d793d
RS
9549
9550 if (outer_op == AND)
5f4f0e22 9551 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
9552 else if (outer_op == SET)
9553 /* This means that we have determined that the result is
9554 equivalent to a constant. This should be rare. */
5f4f0e22 9555 x = GEN_INT (outer_const);
230d793d 9556 else if (GET_RTX_CLASS (outer_op) == '1')
f1c6ba8b 9557 x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
230d793d 9558 else
5f4f0e22 9559 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
9560 }
9561
9562 return x;
663522cb 9563}
230d793d
RS
9564\f
/* Like recog, but we receive the address of a pointer to a new pattern.
   We try to match the rtx that the pointer points to.
   If that fails, we may try to modify or replace the pattern,
   storing the replacement into the same pointer object.

   Modifications include deletion or addition of CLOBBERs.  A rejected
   combination is signalled by returning -1.

   PNEWPAT is the address of the pattern to recognize; on success the
   possibly-modified pattern is stored back through it.

   INSN is the insn the pattern will be placed in; its REG_NOTES are
   temporarily cleared while recognizing (and restored before return).

   PNOTES is a pointer to a location where any REG_UNUSED notes added for
   the CLOBBERs are placed.

   The value is the final insn code from the pattern ultimately matched,
   or -1.  */

static int
recog_for_combine (pnewpat, insn, pnotes)
     rtx *pnewpat;
     rtx insn;
     rtx *pnotes;
{
  register rtx pat = *pnewpat;
  int insn_code_number;
  int num_clobbers_to_add = 0;
  int i;
  rtx notes = 0;
  rtx old_notes;

  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
     we use to indicate that something didn't match.  If we find such a
     thing, force rejection.  */
  if (GET_CODE (pat) == PARALLEL)
    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
	return -1;

  /* Remove the old notes prior to trying to recognize the new pattern.
     They are restored below, whether or not recognition succeeds.  */
  old_notes = REG_NOTES (insn);
  REG_NOTES (insn) = 0;

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog (pat, insn, &num_clobbers_to_add);

  /* If it isn't, there is the possibility that we previously had an insn
     that clobbered some register as a side effect, but the combined
     insn doesn't need to do that.  So try once more without the clobbers
     unless this represents an ASM insn.  */

  if (insn_code_number < 0 && ! check_asm_operands (pat)
      && GET_CODE (pat) == PARALLEL)
    {
      int pos;

      /* Compact the PARALLEL in place, dropping every CLOBBER.
	 SUBST records the changes so they can be undone if the
	 combination is later abandoned.  */
      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
	  {
	    if (i != pos)
	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
	    pos++;
	  }

      SUBST_INT (XVECLEN (pat, 0), pos);

      /* A one-element PARALLEL is just its single element.  */
      if (pos == 1)
	pat = XVECEXP (pat, 0, 0);

      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
    }

  REG_NOTES (insn) = old_notes;

  /* If we had any clobbers to add, make a new pattern that contains
     them.  Then check to make sure that all of them are dead.  */
  if (num_clobbers_to_add)
    {
      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
				     rtvec_alloc (GET_CODE (pat) == PARALLEL
						  ? (XVECLEN (pat, 0)
						     + num_clobbers_to_add)
						  : num_clobbers_to_add + 1));

      /* Copy the existing pattern element(s) into the front of the
	 new PARALLEL.  */
      if (GET_CODE (pat) == PARALLEL)
	for (i = 0; i < XVECLEN (pat, 0); i++)
	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
      else
	XVECEXP (newpat, 0, 0) = pat;

      add_clobbers (newpat, insn_code_number);

      /* The added clobbers occupy the tail of the vector.  Reject the
	 combination if any clobbered register is live here; otherwise
	 accumulate a REG_UNUSED note for each one.  */
      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
	   i < XVECLEN (newpat, 0); i++)
	{
	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
	    return -1;
	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
	}
      pat = newpat;
    }

  *pnewpat = pat;
  *pnotes = notes;

  return insn_code_number;
}
9670\f
/* Like gen_lowpart but for use by combine.  In combine it is not possible
   to create any new pseudoregs.  However, it is safe to create
   invalid memory addresses, because combine will try to recognize
   them and all they will do is make the combine attempt fail.

   MODE is the mode in which X should be viewed; X is the rtx whose
   low part is wanted.

   If for some reason this cannot do its job, an rtx
   (clobber (const_int 0)) is returned.
   An insn containing that will not be recognized.  */

#undef gen_lowpart

static rtx
gen_lowpart_for_combine (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  rtx result;

  /* Nothing to do if X already has the requested mode.  */
  if (GET_MODE (x) == mode)
    return x;

  /* We can only support MODE being wider than a word if X is a
     constant integer or has a mode the same size.  */

  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
      && ! ((GET_MODE (x) == VOIDmode
	     && (GET_CODE (x) == CONST_INT
		 || GET_CODE (x) == CONST_DOUBLE))
	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
     won't know what to do.  So we will strip off the SUBREG here and
     process normally.  */
  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    {
      x = SUBREG_REG (x);
      if (GET_MODE (x) == mode)
	return x;
    }

  result = gen_lowpart_common (mode, x);
#ifdef CLASS_CANNOT_CHANGE_MODE
  /* If the lowpart is a SUBREG of a pseudo and the mode change is one
     the register's class cannot perform in place, record that the
     pseudo changes mode so reload/regclass can account for it.  */
  if (result != 0
      && GET_CODE (result) == SUBREG
      && GET_CODE (SUBREG_REG (result)) == REG
      && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
      && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (result),
				     GET_MODE (SUBREG_REG (result))))
    REG_CHANGES_MODE (REGNO (SUBREG_REG (result))) = 1;
#endif

  if (result)
    return result;

  if (GET_CODE (x) == MEM)
    {
      register int offset = 0;

      /* Refuse to work on a volatile memory ref or one with a mode-dependent
	 address.  */
      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

      /* If we want to refer to something bigger than the original memref,
	 generate a perverse subreg instead.  That will force a reload
	 of the original memref X.  */
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
	return gen_rtx_SUBREG (mode, x, 0);

      /* Compute the byte offset of the narrower (or equal-width) view
	 within the original memref, honoring word and byte order.  */
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	{
	  /* Adjust the address so that the address-after-the-data is
	     unchanged.  */
	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
	}

      /* _nv variant: no new pseudos may be created in combine.  */
      return adjust_address_nv (x, mode, offset);
    }

  /* If X is a comparison operator, rewrite it in a new mode.  This
     probably won't match, but may allow further simplifications.  */
  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
    return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));

  /* If we couldn't simplify X any other way, just enclose it in a
     SUBREG.  Normally, this SUBREG won't match, but some patterns may
     include an explicit SUBREG or we may simplify it further in combine.  */
  else
    {
      int offset = 0;
      rtx res;

      offset = subreg_lowpart_offset (mode, GET_MODE (x));
      res = simplify_gen_subreg (mode, x, GET_MODE (x), offset);
      if (res)
	return res;
      return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
    }
}
9776\f
230d793d
RS
9777/* These routines make binary and unary operations by first seeing if they
9778 fold; if not, a new expression is allocated. */
9779
9780static rtx
9781gen_binary (code, mode, op0, op1)
9782 enum rtx_code code;
9783 enum machine_mode mode;
9784 rtx op0, op1;
9785{
9786 rtx result;
1a26b032
RK
9787 rtx tem;
9788
9789 if (GET_RTX_CLASS (code) == 'c'
8c9864f3 9790 && swap_commutative_operands_p (op0, op1))
1a26b032 9791 tem = op0, op0 = op1, op1 = tem;
230d793d 9792
663522cb 9793 if (GET_RTX_CLASS (code) == '<')
230d793d
RS
9794 {
9795 enum machine_mode op_mode = GET_MODE (op0);
9210df58 9796
663522cb 9797 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
0f41302f 9798 just (REL_OP X Y). */
9210df58
RK
9799 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9800 {
9801 op1 = XEXP (op0, 1);
9802 op0 = XEXP (op0, 0);
9803 op_mode = GET_MODE (op0);
9804 }
9805
230d793d
RS
9806 if (op_mode == VOIDmode)
9807 op_mode = GET_MODE (op1);
9808 result = simplify_relational_operation (code, op_mode, op0, op1);
9809 }
9810 else
9811 result = simplify_binary_operation (code, mode, op0, op1);
9812
9813 if (result)
9814 return result;
9815
9816 /* Put complex operands first and constants second. */
9817 if (GET_RTX_CLASS (code) == 'c'
e5c56fd9 9818 && swap_commutative_operands_p (op0, op1))
f1c6ba8b 9819 return gen_rtx_fmt_ee (code, mode, op1, op0);
230d793d 9820
e5e809f4
JL
9821 /* If we are turning off bits already known off in OP0, we need not do
9822 an AND. */
9823 else if (code == AND && GET_CODE (op1) == CONST_INT
9824 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
663522cb 9825 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
e5e809f4
JL
9826 return op0;
9827
f1c6ba8b 9828 return gen_rtx_fmt_ee (code, mode, op0, op1);
230d793d
RS
9829}
9830\f
9831/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9832 comparison code that will be tested.
9833
9834 The result is a possibly different comparison code to use. *POP0 and
9835 *POP1 may be updated.
9836
9837 It is possible that we might detect that a comparison is either always
9838 true or always false. However, we do not perform general constant
5089e22e 9839 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
9840 should have been detected earlier. Hence we ignore all such cases. */
9841
9842static enum rtx_code
9843simplify_comparison (code, pop0, pop1)
9844 enum rtx_code code;
9845 rtx *pop0;
9846 rtx *pop1;
9847{
9848 rtx op0 = *pop0;
9849 rtx op1 = *pop1;
9850 rtx tem, tem1;
9851 int i;
9852 enum machine_mode mode, tmode;
9853
9854 /* Try a few ways of applying the same transformation to both operands. */
9855 while (1)
9856 {
3a19aabc
RK
9857#ifndef WORD_REGISTER_OPERATIONS
9858 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9859 so check specially. */
9860 if (code != GTU && code != GEU && code != LTU && code != LEU
9861 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9862 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9863 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9864 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9865 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9866 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 9867 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
3a19aabc
RK
9868 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9869 && GET_CODE (XEXP (op1, 1)) == CONST_INT
9870 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9871 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9872 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9873 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9874 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9875 && (INTVAL (XEXP (op0, 1))
9876 == (GET_MODE_BITSIZE (GET_MODE (op0))
9877 - (GET_MODE_BITSIZE
9878 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9879 {
9880 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9881 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9882 }
9883#endif
9884
230d793d
RS
9885 /* If both operands are the same constant shift, see if we can ignore the
9886 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 9887 this shift are known to be zero for both inputs and if the type of
230d793d 9888 comparison is compatible with the shift. */
67232b23
RK
9889 if (GET_CODE (op0) == GET_CODE (op1)
9890 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9891 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 9892 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
67232b23
RK
9893 && (code != GT && code != LT && code != GE && code != LE))
9894 || (GET_CODE (op0) == ASHIFTRT
9895 && (code != GTU && code != LTU
99dc5306 9896 && code != GEU && code != LEU)))
67232b23
RK
9897 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9898 && INTVAL (XEXP (op0, 1)) >= 0
9899 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9900 && XEXP (op0, 1) == XEXP (op1, 1))
230d793d
RS
9901 {
9902 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 9903 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9904 int shift_count = INTVAL (XEXP (op0, 1));
9905
9906 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9907 mask &= (mask >> shift_count) << shift_count;
45620ed4 9908 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
9909 mask = (mask & (mask << shift_count)) >> shift_count;
9910
663522cb
KH
9911 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
9912 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
230d793d
RS
9913 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9914 else
9915 break;
9916 }
9917
9918 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9919 SUBREGs are of the same mode, and, in both cases, the AND would
9920 be redundant if the comparison was done in the narrower mode,
9921 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
9922 and the operand's possibly nonzero bits are 0xffffff01; in that case
9923 if we only care about QImode, we don't need the AND). This case
9924 occurs if the output mode of an scc insn is not SImode and
7e4dc511
RK
9925 STORE_FLAG_VALUE == 1 (e.g., the 386).
9926
9927 Similarly, check for a case where the AND's are ZERO_EXTEND
9928 operations from some narrower mode even though a SUBREG is not
9929 present. */
230d793d 9930
663522cb
KH
9931 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9932 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9933 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
230d793d 9934 {
7e4dc511
RK
9935 rtx inner_op0 = XEXP (op0, 0);
9936 rtx inner_op1 = XEXP (op1, 0);
9937 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9938 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9939 int changed = 0;
663522cb 9940
7e4dc511
RK
9941 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9942 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9943 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9944 && (GET_MODE (SUBREG_REG (inner_op0))
9945 == GET_MODE (SUBREG_REG (inner_op1)))
729a2bc6 9946 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
7e4dc511 9947 <= HOST_BITS_PER_WIDE_INT)
01c82bbb 9948 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
729a2bc6 9949 GET_MODE (SUBREG_REG (inner_op0)))))
01c82bbb
RK
9950 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9951 GET_MODE (SUBREG_REG (inner_op1))))))
7e4dc511
RK
9952 {
9953 op0 = SUBREG_REG (inner_op0);
9954 op1 = SUBREG_REG (inner_op1);
9955
9956 /* The resulting comparison is always unsigned since we masked
0f41302f 9957 off the original sign bit. */
7e4dc511
RK
9958 code = unsigned_condition (code);
9959
9960 changed = 1;
9961 }
230d793d 9962
7e4dc511
RK
9963 else if (c0 == c1)
9964 for (tmode = GET_CLASS_NARROWEST_MODE
9965 (GET_MODE_CLASS (GET_MODE (op0)));
9966 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
e51712db 9967 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
7e4dc511
RK
9968 {
9969 op0 = gen_lowpart_for_combine (tmode, inner_op0);
9970 op1 = gen_lowpart_for_combine (tmode, inner_op1);
66415c8b 9971 code = unsigned_condition (code);
7e4dc511
RK
9972 changed = 1;
9973 break;
9974 }
9975
9976 if (! changed)
9977 break;
230d793d 9978 }
3a19aabc 9979
ad25ba17
RK
9980 /* If both operands are NOT, we can strip off the outer operation
9981 and adjust the comparison code for swapped operands; similarly for
9982 NEG, except that this must be an equality comparison. */
9983 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9984 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9985 && (code == EQ || code == NE)))
9986 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 9987
230d793d
RS
9988 else
9989 break;
9990 }
663522cb 9991
230d793d 9992 /* If the first operand is a constant, swap the operands and adjust the
3aceff0d
RK
9993 comparison code appropriately, but don't do this if the second operand
9994 is already a constant integer. */
8c9864f3 9995 if (swap_commutative_operands_p (op0, op1))
230d793d
RS
9996 {
9997 tem = op0, op0 = op1, op1 = tem;
9998 code = swap_condition (code);
9999 }
10000
10001 /* We now enter a loop during which we will try to simplify the comparison.
10002 For the most part, we only are concerned with comparisons with zero,
10003 but some things may really be comparisons with zero but not start
10004 out looking that way. */
10005
10006 while (GET_CODE (op1) == CONST_INT)
10007 {
10008 enum machine_mode mode = GET_MODE (op0);
770ae6cc 10009 unsigned int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 10010 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
10011 int equality_comparison_p;
10012 int sign_bit_comparison_p;
10013 int unsigned_comparison_p;
5f4f0e22 10014 HOST_WIDE_INT const_op;
230d793d
RS
10015
10016 /* We only want to handle integral modes. This catches VOIDmode,
10017 CCmode, and the floating-point modes. An exception is that we
10018 can handle VOIDmode if OP0 is a COMPARE or a comparison
10019 operation. */
10020
10021 if (GET_MODE_CLASS (mode) != MODE_INT
10022 && ! (mode == VOIDmode
10023 && (GET_CODE (op0) == COMPARE
10024 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10025 break;
10026
10027 /* Get the constant we are comparing against and turn off all bits
10028 not on in our mode. */
3c094e22 10029 const_op = trunc_int_for_mode (INTVAL (op1), mode);
b4fbaca7 10030 op1 = GEN_INT (const_op);
230d793d
RS
10031
10032 /* If we are comparing against a constant power of two and the value
951553af 10033 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
10034 `and'ed with that bit), we can replace this with a comparison
10035 with zero. */
10036 if (const_op
10037 && (code == EQ || code == NE || code == GE || code == GEU
10038 || code == LT || code == LTU)
5f4f0e22 10039 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10040 && exact_log2 (const_op) >= 0
e51712db 10041 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
230d793d
RS
10042 {
10043 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10044 op1 = const0_rtx, const_op = 0;
10045 }
10046
d0ab8cd3
RK
10047 /* Similarly, if we are comparing a value known to be either -1 or
10048 0 with -1, change it to the opposite comparison against zero. */
10049
10050 if (const_op == -1
10051 && (code == EQ || code == NE || code == GT || code == LE
10052 || code == GEU || code == LTU)
10053 && num_sign_bit_copies (op0, mode) == mode_width)
10054 {
10055 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10056 op1 = const0_rtx, const_op = 0;
10057 }
10058
230d793d 10059 /* Do some canonicalizations based on the comparison code. We prefer
663522cb 10060 comparisons against zero and then prefer equality comparisons.
4803a34a 10061 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
10062
10063 switch (code)
10064 {
10065 case LT:
4803a34a
RK
10066 /* < C is equivalent to <= (C - 1) */
10067 if (const_op > 0)
230d793d 10068 {
4803a34a 10069 const_op -= 1;
5f4f0e22 10070 op1 = GEN_INT (const_op);
230d793d
RS
10071 code = LE;
10072 /* ... fall through to LE case below. */
10073 }
10074 else
10075 break;
10076
10077 case LE:
4803a34a
RK
10078 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
10079 if (const_op < 0)
10080 {
10081 const_op += 1;
5f4f0e22 10082 op1 = GEN_INT (const_op);
4803a34a
RK
10083 code = LT;
10084 }
230d793d
RS
10085
10086 /* If we are doing a <= 0 comparison on a value known to have
10087 a zero sign bit, we can replace this with == 0. */
10088 else if (const_op == 0
5f4f0e22 10089 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10090 && (nonzero_bits (op0, mode)
5f4f0e22 10091 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
10092 code = EQ;
10093 break;
10094
10095 case GE:
0f41302f 10096 /* >= C is equivalent to > (C - 1). */
4803a34a 10097 if (const_op > 0)
230d793d 10098 {
4803a34a 10099 const_op -= 1;
5f4f0e22 10100 op1 = GEN_INT (const_op);
230d793d
RS
10101 code = GT;
10102 /* ... fall through to GT below. */
10103 }
10104 else
10105 break;
10106
10107 case GT:
663522cb 10108 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
4803a34a
RK
10109 if (const_op < 0)
10110 {
10111 const_op += 1;
5f4f0e22 10112 op1 = GEN_INT (const_op);
4803a34a
RK
10113 code = GE;
10114 }
230d793d
RS
10115
10116 /* If we are doing a > 0 comparison on a value known to have
10117 a zero sign bit, we can replace this with != 0. */
10118 else if (const_op == 0
5f4f0e22 10119 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10120 && (nonzero_bits (op0, mode)
5f4f0e22 10121 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
10122 code = NE;
10123 break;
10124
230d793d 10125 case LTU:
4803a34a
RK
10126 /* < C is equivalent to <= (C - 1). */
10127 if (const_op > 0)
10128 {
10129 const_op -= 1;
5f4f0e22 10130 op1 = GEN_INT (const_op);
4803a34a 10131 code = LEU;
0f41302f 10132 /* ... fall through ... */
4803a34a 10133 }
d0ab8cd3
RK
10134
10135 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
f77aada2
JW
10136 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10137 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
10138 {
10139 const_op = 0, op1 = const0_rtx;
10140 code = GE;
10141 break;
10142 }
4803a34a
RK
10143 else
10144 break;
230d793d
RS
10145
10146 case LEU:
10147 /* unsigned <= 0 is equivalent to == 0 */
10148 if (const_op == 0)
10149 code = EQ;
d0ab8cd3 10150
0f41302f 10151 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
f77aada2
JW
10152 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10153 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
10154 {
10155 const_op = 0, op1 = const0_rtx;
10156 code = GE;
10157 }
230d793d
RS
10158 break;
10159
4803a34a
RK
10160 case GEU:
10161 /* >= C is equivalent to < (C - 1). */
10162 if (const_op > 1)
10163 {
10164 const_op -= 1;
5f4f0e22 10165 op1 = GEN_INT (const_op);
4803a34a 10166 code = GTU;
0f41302f 10167 /* ... fall through ... */
4803a34a 10168 }
d0ab8cd3
RK
10169
10170 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
f77aada2
JW
10171 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10172 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
10173 {
10174 const_op = 0, op1 = const0_rtx;
10175 code = LT;
8b2e69e1 10176 break;
d0ab8cd3 10177 }
4803a34a
RK
10178 else
10179 break;
10180
230d793d
RS
10181 case GTU:
10182 /* unsigned > 0 is equivalent to != 0 */
10183 if (const_op == 0)
10184 code = NE;
d0ab8cd3
RK
10185
10186 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
f77aada2
JW
10187 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10188 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
10189 {
10190 const_op = 0, op1 = const0_rtx;
10191 code = LT;
10192 }
230d793d 10193 break;
e9a25f70
JL
10194
10195 default:
10196 break;
230d793d
RS
10197 }
10198
10199 /* Compute some predicates to simplify code below. */
10200
10201 equality_comparison_p = (code == EQ || code == NE);
10202 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10203 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
d5010e66 10204 || code == GEU);
230d793d 10205
6139ff20
RK
10206 /* If this is a sign bit comparison and we can do arithmetic in
10207 MODE, say that we will only be needing the sign bit of OP0. */
10208 if (sign_bit_comparison_p
10209 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10210 op0 = force_to_mode (op0, mode,
10211 ((HOST_WIDE_INT) 1
10212 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 10213 NULL_RTX, 0);
6139ff20 10214
230d793d
RS
10215 /* Now try cases based on the opcode of OP0. If none of the cases
10216 does a "continue", we exit this loop immediately after the
10217 switch. */
10218
10219 switch (GET_CODE (op0))
10220 {
10221 case ZERO_EXTRACT:
10222 /* If we are extracting a single bit from a variable position in
10223 a constant that has only a single bit set and are comparing it
663522cb 10224 with zero, we can convert this into an equality comparison
d7cd794f 10225 between the position and the location of the single bit. */
230d793d 10226
230d793d
RS
10227 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
10228 && XEXP (op0, 1) == const1_rtx
10229 && equality_comparison_p && const_op == 0
d7cd794f 10230 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
230d793d 10231 {
f76b9db2 10232 if (BITS_BIG_ENDIAN)
0d8e55d8 10233 {
d7cd794f 10234#ifdef HAVE_extzv
a995e389 10235 mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
0d8e55d8
JL
10236 if (mode == VOIDmode)
10237 mode = word_mode;
10238 i = (GET_MODE_BITSIZE (mode) - 1 - i);
d7cd794f 10239#else
663522cb 10240 i = BITS_PER_WORD - 1 - i;
230d793d 10241#endif
0d8e55d8 10242 }
230d793d
RS
10243
10244 op0 = XEXP (op0, 2);
5f4f0e22 10245 op1 = GEN_INT (i);
230d793d
RS
10246 const_op = i;
10247
10248 /* Result is nonzero iff shift count is equal to I. */
10249 code = reverse_condition (code);
10250 continue;
10251 }
230d793d 10252
0f41302f 10253 /* ... fall through ... */
230d793d
RS
10254
10255 case SIGN_EXTRACT:
10256 tem = expand_compound_operation (op0);
10257 if (tem != op0)
10258 {
10259 op0 = tem;
10260 continue;
10261 }
10262 break;
10263
10264 case NOT:
10265 /* If testing for equality, we can take the NOT of the constant. */
10266 if (equality_comparison_p
10267 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10268 {
10269 op0 = XEXP (op0, 0);
10270 op1 = tem;
10271 continue;
10272 }
10273
10274 /* If just looking at the sign bit, reverse the sense of the
10275 comparison. */
10276 if (sign_bit_comparison_p)
10277 {
10278 op0 = XEXP (op0, 0);
10279 code = (code == GE ? LT : GE);
10280 continue;
10281 }
10282 break;
10283
10284 case NEG:
10285 /* If testing for equality, we can take the NEG of the constant. */
10286 if (equality_comparison_p
10287 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10288 {
10289 op0 = XEXP (op0, 0);
10290 op1 = tem;
10291 continue;
10292 }
10293
10294 /* The remaining cases only apply to comparisons with zero. */
10295 if (const_op != 0)
10296 break;
10297
10298 /* When X is ABS or is known positive,
10299 (neg X) is < 0 if and only if X != 0. */
10300
10301 if (sign_bit_comparison_p
10302 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 10303 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10304 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10305 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
10306 {
10307 op0 = XEXP (op0, 0);
10308 code = (code == LT ? NE : EQ);
10309 continue;
10310 }
10311
3bed8141 10312 /* If we have NEG of something whose two high-order bits are the
0f41302f 10313 same, we know that "(-a) < 0" is equivalent to "a > 0". */
3bed8141 10314 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
10315 {
10316 op0 = XEXP (op0, 0);
10317 code = swap_condition (code);
10318 continue;
10319 }
10320 break;
10321
10322 case ROTATE:
10323 /* If we are testing equality and our count is a constant, we
10324 can perform the inverse operation on our RHS. */
10325 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10326 && (tem = simplify_binary_operation (ROTATERT, mode,
10327 op1, XEXP (op0, 1))) != 0)
10328 {
10329 op0 = XEXP (op0, 0);
10330 op1 = tem;
10331 continue;
10332 }
10333
10334 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10335 a particular bit. Convert it to an AND of a constant of that
10336 bit. This will be converted into a ZERO_EXTRACT. */
10337 if (const_op == 0 && sign_bit_comparison_p
10338 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10339 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10340 {
5f4f0e22
CH
10341 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10342 ((HOST_WIDE_INT) 1
10343 << (mode_width - 1
10344 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10345 code = (code == LT ? NE : EQ);
10346 continue;
10347 }
10348
663522cb 10349 /* Fall through. */
230d793d
RS
10350
10351 case ABS:
10352 /* ABS is ignorable inside an equality comparison with zero. */
10353 if (const_op == 0 && equality_comparison_p)
10354 {
10355 op0 = XEXP (op0, 0);
10356 continue;
10357 }
10358 break;
230d793d
RS
10359
10360 case SIGN_EXTEND:
10361 /* Can simplify (compare (zero/sign_extend FOO) CONST)
663522cb 10362 to (compare FOO CONST) if CONST fits in FOO's mode and we
230d793d
RS
10363 are either testing inequality or have an unsigned comparison
10364 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
10365 if (! unsigned_comparison_p
10366 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
10367 <= HOST_BITS_PER_WIDE_INT)
10368 && ((unsigned HOST_WIDE_INT) const_op
e51712db 10369 < (((unsigned HOST_WIDE_INT) 1
5f4f0e22 10370 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
10371 {
10372 op0 = XEXP (op0, 0);
10373 continue;
10374 }
10375 break;
10376
10377 case SUBREG:
a687e897 10378 /* Check for the case where we are comparing A - C1 with C2,
abc95ed3 10379 both constants are smaller than 1/2 the maximum positive
a687e897
RK
10380 value in MODE, and the comparison is equality or unsigned.
10381 In that case, if A is either zero-extended to MODE or has
10382 sufficient sign bits so that the high-order bit in MODE
10383 is a copy of the sign in the inner mode, we can prove that it is
10384 safe to do the operation in the wider mode. This simplifies
10385 many range checks. */
10386
10387 if (mode_width <= HOST_BITS_PER_WIDE_INT
10388 && subreg_lowpart_p (op0)
10389 && GET_CODE (SUBREG_REG (op0)) == PLUS
10390 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10391 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
663522cb
KH
10392 && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10393 < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
adb7a1cb 10394 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
10395 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10396 GET_MODE (SUBREG_REG (op0)))
663522cb 10397 & ~GET_MODE_MASK (mode))
a687e897
RK
10398 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10399 GET_MODE (SUBREG_REG (op0)))
10400 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10401 - GET_MODE_BITSIZE (mode)))))
10402 {
10403 op0 = SUBREG_REG (op0);
10404 continue;
10405 }
10406
fe0cf571
RK
10407 /* If the inner mode is narrower and we are extracting the low part,
10408 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10409 if (subreg_lowpart_p (op0)
89f1c7f2
RS
10410 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10411 /* Fall through */ ;
10412 else
230d793d
RS
10413 break;
10414
0f41302f 10415 /* ... fall through ... */
230d793d
RS
10416
10417 case ZERO_EXTEND:
10418 if ((unsigned_comparison_p || equality_comparison_p)
10419 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
10420 <= HOST_BITS_PER_WIDE_INT)
10421 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
10422 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10423 {
10424 op0 = XEXP (op0, 0);
10425 continue;
10426 }
10427 break;
10428
10429 case PLUS:
20fdd649 10430 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 10431 this for equality comparisons due to pathological cases involving
230d793d 10432 overflows. */
20fdd649
RK
10433 if (equality_comparison_p
10434 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10435 op1, XEXP (op0, 1))))
230d793d
RS
10436 {
10437 op0 = XEXP (op0, 0);
10438 op1 = tem;
10439 continue;
10440 }
10441
10442 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10443 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10444 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10445 {
10446 op0 = XEXP (XEXP (op0, 0), 0);
10447 code = (code == LT ? EQ : NE);
10448 continue;
10449 }
10450 break;
10451
10452 case MINUS:
65945ec1
HPN
10453 /* We used to optimize signed comparisons against zero, but that
10454 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10455 arrive here as equality comparisons, or (GEU, LTU) are
10456 optimized away. No need to special-case them. */
0bd4b461 10457
20fdd649
RK
10458 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10459 (eq B (minus A C)), whichever simplifies. We can only do
10460 this for equality comparisons due to pathological cases involving
10461 overflows. */
10462 if (equality_comparison_p
10463 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10464 XEXP (op0, 1), op1)))
10465 {
10466 op0 = XEXP (op0, 0);
10467 op1 = tem;
10468 continue;
10469 }
10470
10471 if (equality_comparison_p
10472 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10473 XEXP (op0, 0), op1)))
10474 {
10475 op0 = XEXP (op0, 1);
10476 op1 = tem;
10477 continue;
10478 }
10479
230d793d
RS
10480 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10481 of bits in X minus 1, is one iff X > 0. */
10482 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10483 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10484 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10485 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10486 {
10487 op0 = XEXP (op0, 1);
10488 code = (code == GE ? LE : GT);
10489 continue;
10490 }
10491 break;
10492
10493 case XOR:
10494 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10495 if C is zero or B is a constant. */
10496 if (equality_comparison_p
10497 && 0 != (tem = simplify_binary_operation (XOR, mode,
10498 XEXP (op0, 1), op1)))
10499 {
10500 op0 = XEXP (op0, 0);
10501 op1 = tem;
10502 continue;
10503 }
10504 break;
10505
10506 case EQ: case NE:
69bc0a1f
JH
10507 case UNEQ: case LTGT:
10508 case LT: case LTU: case UNLT: case LE: case LEU: case UNLE:
10509 case GT: case GTU: case UNGT: case GE: case GEU: case UNGE:
10510 case UNORDERED: case ORDERED:
230d793d
RS
10511 /* We can't do anything if OP0 is a condition code value, rather
10512 than an actual data value. */
10513 if (const_op != 0
10514#ifdef HAVE_cc0
10515 || XEXP (op0, 0) == cc0_rtx
10516#endif
10517 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10518 break;
10519
10520 /* Get the two operands being compared. */
10521 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10522 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10523 else
10524 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10525
10526 /* Check for the cases where we simply want the result of the
10527 earlier test or the opposite of that result. */
9a915772 10528 if (code == NE || code == EQ
5f4f0e22 10529 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 10530 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 10531 && (STORE_FLAG_VALUE
5f4f0e22
CH
10532 & (((HOST_WIDE_INT) 1
10533 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
aa6683f7 10534 && (code == LT || code == GE)))
230d793d 10535 {
aa6683f7
GK
10536 enum rtx_code new_code;
10537 if (code == LT || code == NE)
10538 new_code = GET_CODE (op0);
10539 else
10540 new_code = combine_reversed_comparison_code (op0);
23190837 10541
aa6683f7 10542 if (new_code != UNKNOWN)
9a915772 10543 {
aa6683f7
GK
10544 code = new_code;
10545 op0 = tem;
10546 op1 = tem1;
9a915772
JH
10547 continue;
10548 }
230d793d
RS
10549 }
10550 break;
10551
10552 case IOR:
10553 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10554 iff X <= 0. */
10555 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10556 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10557 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10558 {
10559 op0 = XEXP (op0, 1);
10560 code = (code == GE ? GT : LE);
10561 continue;
10562 }
10563 break;
10564
10565 case AND:
10566 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10567 will be converted to a ZERO_EXTRACT later. */
10568 if (const_op == 0 && equality_comparison_p
45620ed4 10569 && GET_CODE (XEXP (op0, 0)) == ASHIFT
230d793d
RS
10570 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10571 {
10572 op0 = simplify_and_const_int
f1c6ba8b
RK
10573 (op0, mode, gen_rtx_LSHIFTRT (mode,
10574 XEXP (op0, 1),
10575 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 10576 (HOST_WIDE_INT) 1);
230d793d
RS
10577 continue;
10578 }
10579
10580 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10581 zero and X is a comparison and C1 and C2 describe only bits set
10582 in STORE_FLAG_VALUE, we can compare with X. */
10583 if (const_op == 0 && equality_comparison_p
5f4f0e22 10584 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
10585 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10586 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10587 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10588 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 10589 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
10590 {
10591 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10592 << INTVAL (XEXP (XEXP (op0, 0), 1)));
663522cb 10593 if ((~STORE_FLAG_VALUE & mask) == 0
230d793d
RS
10594 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10595 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10596 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10597 {
10598 op0 = XEXP (XEXP (op0, 0), 0);
10599 continue;
10600 }
10601 }
10602
10603 /* If we are doing an equality comparison of an AND of a bit equal
10604 to the sign bit, replace this with a LT or GE comparison of
10605 the underlying value. */
10606 if (equality_comparison_p
10607 && const_op == 0
10608 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10609 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10610 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
e51712db 10611 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
10612 {
10613 op0 = XEXP (op0, 0);
10614 code = (code == EQ ? GE : LT);
10615 continue;
10616 }
10617
10618 /* If this AND operation is really a ZERO_EXTEND from a narrower
10619 mode, the constant fits within that mode, and this is either an
10620 equality or unsigned comparison, try to do this comparison in
10621 the narrower mode. */
10622 if ((equality_comparison_p || unsigned_comparison_p)
10623 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10624 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10625 & GET_MODE_MASK (mode))
10626 + 1)) >= 0
10627 && const_op >> i == 0
10628 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10629 {
10630 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10631 continue;
10632 }
e5e809f4
JL
10633
10634 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10635 in both M1 and M2 and the SUBREG is either paradoxical or
10636 represents the low part, permute the SUBREG and the AND and
10637 try again. */
10638 if (GET_CODE (XEXP (op0, 0)) == SUBREG
c5c76735 10639 && (0
9ec36da5 10640#ifdef WORD_REGISTER_OPERATIONS
c5c76735
JL
10641 || ((mode_width
10642 > (GET_MODE_BITSIZE
10643 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10644 && mode_width <= BITS_PER_WORD)
9ec36da5 10645#endif
c5c76735
JL
10646 || ((mode_width
10647 <= (GET_MODE_BITSIZE
10648 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10649 && subreg_lowpart_p (XEXP (op0, 0))))
adc05e6c
JL
10650#ifndef WORD_REGISTER_OPERATIONS
10651 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10652 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10653 As originally written the upper bits have a defined value
10654 due to the AND operation. However, if we commute the AND
10655 inside the SUBREG then they no longer have defined values
10656 and the meaning of the code has been changed. */
10657 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10658 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10659#endif
e5e809f4
JL
10660 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10661 && mode_width <= HOST_BITS_PER_WIDE_INT
10662 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10663 <= HOST_BITS_PER_WIDE_INT)
663522cb
KH
10664 && (INTVAL (XEXP (op0, 1)) & ~mask) == 0
10665 && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
9ec36da5 10666 & INTVAL (XEXP (op0, 1)))
e51712db
KG
10667 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10668 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
9ec36da5 10669 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
663522cb 10670
e5e809f4
JL
10671 {
10672 op0
10673 = gen_lowpart_for_combine
10674 (mode,
10675 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10676 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10677 continue;
10678 }
10679
9f8e169e
RH
10680 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10681 (eq (and (lshiftrt X) 1) 0). */
10682 if (const_op == 0 && equality_comparison_p
10683 && XEXP (op0, 1) == const1_rtx
10684 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10685 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT)
10686 {
10687 op0 = simplify_and_const_int
f1c6ba8b
RK
10688 (op0, mode,
10689 gen_rtx_LSHIFTRT (mode, XEXP (XEXP (XEXP (op0, 0), 0), 0),
10690 XEXP (XEXP (op0, 0), 1)),
9f8e169e
RH
10691 (HOST_WIDE_INT) 1);
10692 code = (code == NE ? EQ : NE);
10693 continue;
10694 }
230d793d
RS
10695 break;
10696
10697 case ASHIFT:
45620ed4 10698 /* If we have (compare (ashift FOO N) (const_int C)) and
230d793d 10699 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 10700 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
10701 shifted right N bits so long as the low-order N bits of C are
10702 zero. */
10703 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10704 && INTVAL (XEXP (op0, 1)) >= 0
10705 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
10706 < HOST_BITS_PER_WIDE_INT)
10707 && ((const_op
34785d05 10708 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 10709 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10710 && (nonzero_bits (XEXP (op0, 0), mode)
663522cb
KH
10711 & ~(mask >> (INTVAL (XEXP (op0, 1))
10712 + ! equality_comparison_p))) == 0)
230d793d 10713 {
7ce787fe
NC
10714 /* We must perform a logical shift, not an arithmetic one,
10715 as we want the top N bits of C to be zero. */
aaaec114 10716 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
663522cb 10717
7ce787fe 10718 temp >>= INTVAL (XEXP (op0, 1));
aaaec114 10719 op1 = GEN_INT (trunc_int_for_mode (temp, mode));
230d793d
RS
10720 op0 = XEXP (op0, 0);
10721 continue;
10722 }
10723
dfbe1b2f 10724 /* If we are doing a sign bit comparison, it means we are testing
230d793d 10725 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 10726 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10727 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10728 {
5f4f0e22
CH
10729 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10730 ((HOST_WIDE_INT) 1
10731 << (mode_width - 1
10732 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10733 code = (code == LT ? NE : EQ);
10734 continue;
10735 }
dfbe1b2f
RK
10736
10737 /* If this an equality comparison with zero and we are shifting
10738 the low bit to the sign bit, we can convert this to an AND of the
10739 low-order bit. */
10740 if (const_op == 0 && equality_comparison_p
10741 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10742 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10743 {
5f4f0e22
CH
10744 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10745 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
10746 continue;
10747 }
230d793d
RS
10748 break;
10749
10750 case ASHIFTRT:
d0ab8cd3
RK
10751 /* If this is an equality comparison with zero, we can do this
10752 as a logical shift, which might be much simpler. */
10753 if (equality_comparison_p && const_op == 0
10754 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10755 {
10756 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10757 XEXP (op0, 0),
10758 INTVAL (XEXP (op0, 1)));
10759 continue;
10760 }
10761
230d793d
RS
10762 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10763 do the comparison in a narrower mode. */
10764 if (! unsigned_comparison_p
10765 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10766 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10767 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10768 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 10769 MODE_INT, 1)) != BLKmode
5f4f0e22 10770 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
663522cb 10771 || ((unsigned HOST_WIDE_INT) -const_op
5f4f0e22 10772 <= GET_MODE_MASK (tmode))))
230d793d
RS
10773 {
10774 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10775 continue;
10776 }
10777
14a774a9
RK
10778 /* Likewise if OP0 is a PLUS of a sign extension with a
10779 constant, which is usually represented with the PLUS
10780 between the shifts. */
10781 if (! unsigned_comparison_p
10782 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10783 && GET_CODE (XEXP (op0, 0)) == PLUS
10784 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10785 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10786 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10787 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10788 MODE_INT, 1)) != BLKmode
10789 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
663522cb 10790 || ((unsigned HOST_WIDE_INT) -const_op
14a774a9
RK
10791 <= GET_MODE_MASK (tmode))))
10792 {
10793 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10794 rtx add_const = XEXP (XEXP (op0, 0), 1);
10795 rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
10796 XEXP (op0, 1));
10797
10798 op0 = gen_binary (PLUS, tmode,
10799 gen_lowpart_for_combine (tmode, inner),
10800 new_const);
10801 continue;
10802 }
10803
0f41302f 10804 /* ... fall through ... */
230d793d
RS
10805 case LSHIFTRT:
10806 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 10807 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
10808 by comparing FOO with C shifted left N bits so long as no
10809 overflow occurs. */
10810 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10811 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
10812 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10813 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10814 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10815 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
10816 && (const_op == 0
10817 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10818 < mode_width)))
10819 {
10820 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 10821 op1 = GEN_INT (const_op);
230d793d
RS
10822 op0 = XEXP (op0, 0);
10823 continue;
10824 }
10825
10826 /* If we are using this shift to extract just the sign bit, we
10827 can replace this with an LT or GE comparison. */
10828 if (const_op == 0
10829 && (equality_comparison_p || sign_bit_comparison_p)
10830 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10831 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10832 {
10833 op0 = XEXP (op0, 0);
10834 code = (code == NE || code == GT ? LT : GE);
10835 continue;
10836 }
10837 break;
663522cb 10838
e9a25f70
JL
10839 default:
10840 break;
230d793d
RS
10841 }
10842
10843 break;
10844 }
10845
10846 /* Now make any compound operations involved in this comparison. Then,
76d31c63 10847 check for an outmost SUBREG on OP0 that is not doing anything or is
230d793d
RS
10848 paradoxical. The latter case can only occur when it is known that the
10849 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
10850 We can never remove a SUBREG for a non-equality comparison because the
10851 sign bit is in a different place in the underlying object. */
10852
10853 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10854 op1 = make_compound_operation (op1, SET);
10855
10856 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10857 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10858 && (code == NE || code == EQ)
10859 && ((GET_MODE_SIZE (GET_MODE (op0))
10860 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10861 {
10862 op0 = SUBREG_REG (op0);
10863 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10864 }
10865
10866 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10867 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10868 && (code == NE || code == EQ)
ac49a949
RS
10869 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10870 <= HOST_BITS_PER_WIDE_INT)
951553af 10871 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
663522cb 10872 & ~GET_MODE_MASK (GET_MODE (op0))) == 0
230d793d
RS
10873 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10874 op1),
951553af 10875 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
663522cb 10876 & ~GET_MODE_MASK (GET_MODE (op0))) == 0))
230d793d
RS
10877 op0 = SUBREG_REG (op0), op1 = tem;
10878
10879 /* We now do the opposite procedure: Some machines don't have compare
10880 insns in all modes. If OP0's mode is an integer mode smaller than a
10881 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
10882 mode for which we can do the compare. There are a number of cases in
10883 which we can use the wider mode. */
230d793d
RS
10884
10885 mode = GET_MODE (op0);
10886 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10887 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10888 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10889 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
10890 (tmode != VOIDmode
10891 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 10892 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 10893 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 10894 {
951553af 10895 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
10896 narrower mode and this is an equality or unsigned comparison,
10897 we can use the wider mode. Similarly for sign-extended
7e4dc511 10898 values, in which case it is true for all comparisons. */
a687e897
RK
10899 if (((code == EQ || code == NE
10900 || code == GEU || code == GTU || code == LEU || code == LTU)
663522cb
KH
10901 && (nonzero_bits (op0, tmode) & ~GET_MODE_MASK (mode)) == 0
10902 && (nonzero_bits (op1, tmode) & ~GET_MODE_MASK (mode)) == 0)
7e4dc511
RK
10903 || ((num_sign_bit_copies (op0, tmode)
10904 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 10905 && (num_sign_bit_copies (op1, tmode)
58744483 10906 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897 10907 {
14a774a9
RK
10908 /* If OP0 is an AND and we don't have an AND in MODE either,
10909 make a new AND in the proper mode. */
10910 if (GET_CODE (op0) == AND
10911 && (add_optab->handlers[(int) mode].insn_code
10912 == CODE_FOR_nothing))
10913 op0 = gen_binary (AND, tmode,
10914 gen_lowpart_for_combine (tmode,
10915 XEXP (op0, 0)),
10916 gen_lowpart_for_combine (tmode,
10917 XEXP (op0, 1)));
10918
a687e897
RK
10919 op0 = gen_lowpart_for_combine (tmode, op0);
10920 op1 = gen_lowpart_for_combine (tmode, op1);
10921 break;
10922 }
230d793d 10923
a687e897
RK
10924 /* If this is a test for negative, we can make an explicit
10925 test of the sign bit. */
10926
10927 if (op1 == const0_rtx && (code == LT || code == GE)
10928 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 10929 {
a687e897
RK
10930 op0 = gen_binary (AND, tmode,
10931 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
10932 GEN_INT ((HOST_WIDE_INT) 1
10933 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 10934 code = (code == LT) ? NE : EQ;
a687e897 10935 break;
230d793d 10936 }
230d793d
RS
10937 }
10938
b7a775b2
RK
10939#ifdef CANONICALIZE_COMPARISON
10940 /* If this machine only supports a subset of valid comparisons, see if we
10941 can convert an unsupported one into a supported one. */
10942 CANONICALIZE_COMPARISON (code, op0, op1);
10943#endif
10944
230d793d
RS
10945 *pop0 = op0;
10946 *pop1 = op1;
10947
10948 return code;
10949}
10950\f
9a915772
JH
10951/* Like jump.c' reversed_comparison_code, but use combine infrastructure for
10952 searching backward. */
c3ffea50 10953static enum rtx_code
9a915772
JH
10954combine_reversed_comparison_code (exp)
10955 rtx exp;
230d793d 10956{
9a915772
JH
10957 enum rtx_code code1 = reversed_comparison_code (exp, NULL);
10958 rtx x;
10959
10960 if (code1 != UNKNOWN
10961 || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
10962 return code1;
10963 /* Otherwise try and find where the condition codes were last set and
10964 use that. */
c3ffea50
AJ
10965 x = get_last_value (XEXP (exp, 0));
10966 if (!x || GET_CODE (x) != COMPARE)
9a915772
JH
10967 return UNKNOWN;
10968 return reversed_comparison_code_parts (GET_CODE (exp),
10969 XEXP (x, 0), XEXP (x, 1), NULL);
10970}
10971/* Return comparison with reversed code of EXP and operands OP0 and OP1.
10972 Return NULL_RTX in case we fail to do the reversal. */
10973static rtx
10974reversed_comparison (exp, mode, op0, op1)
10975 rtx exp, op0, op1;
10976 enum machine_mode mode;
10977{
10978 enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
10979 if (reversed_code == UNKNOWN)
10980 return NULL_RTX;
10981 else
10982 return gen_binary (reversed_code, mode, op0, op1);
230d793d
RS
10983}
10984\f
10985/* Utility function for following routine. Called when X is part of a value
10986 being stored into reg_last_set_value. Sets reg_last_set_table_tick
10987 for each register mentioned. Similar to mention_regs in cse.c */
10988
10989static void
10990update_table_tick (x)
10991 rtx x;
10992{
10993 register enum rtx_code code = GET_CODE (x);
6f7d635c 10994 register const char *fmt = GET_RTX_FORMAT (code);
230d793d
RS
10995 register int i;
10996
10997 if (code == REG)
10998 {
770ae6cc
RK
10999 unsigned int regno = REGNO (x);
11000 unsigned int endregno
11001 = regno + (regno < FIRST_PSEUDO_REGISTER
11002 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11003 unsigned int r;
230d793d 11004
770ae6cc
RK
11005 for (r = regno; r < endregno; r++)
11006 reg_last_set_table_tick[r] = label_tick;
230d793d
RS
11007
11008 return;
11009 }
663522cb 11010
230d793d
RS
11011 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11012 /* Note that we can't have an "E" in values stored; see
11013 get_last_value_validate. */
11014 if (fmt[i] == 'e')
11015 update_table_tick (XEXP (x, i));
11016}
11017
11018/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
11019 are saying that the register is clobbered and we no longer know its
7988fd36
RK
11020 value. If INSN is zero, don't update reg_last_set; this is only permitted
11021 with VALUE also zero and is used to invalidate the register. */
230d793d
RS
11022
11023static void
11024record_value_for_reg (reg, insn, value)
11025 rtx reg;
11026 rtx insn;
11027 rtx value;
11028{
770ae6cc
RK
11029 unsigned int regno = REGNO (reg);
11030 unsigned int endregno
11031 = regno + (regno < FIRST_PSEUDO_REGISTER
11032 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11033 unsigned int i;
230d793d
RS
11034
11035 /* If VALUE contains REG and we have a previous value for REG, substitute
11036 the previous value. */
11037 if (value && insn && reg_overlap_mentioned_p (reg, value))
11038 {
11039 rtx tem;
11040
11041 /* Set things up so get_last_value is allowed to see anything set up to
11042 our insn. */
11043 subst_low_cuid = INSN_CUID (insn);
663522cb 11044 tem = get_last_value (reg);
230d793d 11045
14a774a9
RK
11046 /* If TEM is simply a binary operation with two CLOBBERs as operands,
11047 it isn't going to be useful and will take a lot of time to process,
11048 so just use the CLOBBER. */
11049
230d793d 11050 if (tem)
14a774a9
RK
11051 {
11052 if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11053 || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11054 && GET_CODE (XEXP (tem, 0)) == CLOBBER
11055 && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11056 tem = XEXP (tem, 0);
11057
11058 value = replace_rtx (copy_rtx (value), reg, tem);
11059 }
230d793d
RS
11060 }
11061
11062 /* For each register modified, show we don't know its value, that
ef026f91
RS
11063 we don't know about its bitwise content, that its value has been
11064 updated, and that we don't know the location of the death of the
11065 register. */
770ae6cc 11066 for (i = regno; i < endregno; i++)
230d793d
RS
11067 {
11068 if (insn)
11069 reg_last_set[i] = insn;
770ae6cc 11070
230d793d 11071 reg_last_set_value[i] = 0;
ef026f91
RS
11072 reg_last_set_mode[i] = 0;
11073 reg_last_set_nonzero_bits[i] = 0;
11074 reg_last_set_sign_bit_copies[i] = 0;
230d793d
RS
11075 reg_last_death[i] = 0;
11076 }
11077
11078 /* Mark registers that are being referenced in this value. */
11079 if (value)
11080 update_table_tick (value);
11081
11082 /* Now update the status of each register being set.
11083 If someone is using this register in this block, set this register
11084 to invalid since we will get confused between the two lives in this
11085 basic block. This makes using this register always invalid. In cse, we
11086 scan the table to invalidate all entries using this register, but this
11087 is too much work for us. */
11088
11089 for (i = regno; i < endregno; i++)
11090 {
11091 reg_last_set_label[i] = label_tick;
11092 if (value && reg_last_set_table_tick[i] == label_tick)
11093 reg_last_set_invalid[i] = 1;
11094 else
11095 reg_last_set_invalid[i] = 0;
11096 }
11097
11098 /* The value being assigned might refer to X (like in "x++;"). In that
11099 case, we must replace it with (clobber (const_int 0)) to prevent
11100 infinite loops. */
9a893315 11101 if (value && ! get_last_value_validate (&value, insn,
230d793d
RS
11102 reg_last_set_label[regno], 0))
11103 {
11104 value = copy_rtx (value);
9a893315
JW
11105 if (! get_last_value_validate (&value, insn,
11106 reg_last_set_label[regno], 1))
230d793d
RS
11107 value = 0;
11108 }
11109
55310dad
RK
11110 /* For the main register being modified, update the value, the mode, the
11111 nonzero bits, and the number of sign bit copies. */
11112
230d793d
RS
11113 reg_last_set_value[regno] = value;
11114
55310dad
RK
11115 if (value)
11116 {
2afabb48 11117 subst_low_cuid = INSN_CUID (insn);
55310dad
RK
11118 reg_last_set_mode[regno] = GET_MODE (reg);
11119 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
11120 reg_last_set_sign_bit_copies[regno]
11121 = num_sign_bit_copies (value, GET_MODE (reg));
11122 }
230d793d
RS
11123}
11124
230d793d 11125/* Called via note_stores from record_dead_and_set_regs to handle one
84832317
MM
11126 SET or CLOBBER in an insn. DATA is the instruction in which the
11127 set is occurring. */
230d793d
RS
11128
11129static void
84832317 11130record_dead_and_set_regs_1 (dest, setter, data)
230d793d 11131 rtx dest, setter;
84832317 11132 void *data;
230d793d 11133{
84832317
MM
11134 rtx record_dead_insn = (rtx) data;
11135
ca89d290
RK
11136 if (GET_CODE (dest) == SUBREG)
11137 dest = SUBREG_REG (dest);
11138
230d793d
RS
11139 if (GET_CODE (dest) == REG)
11140 {
11141 /* If we are setting the whole register, we know its value. Otherwise
11142 show that we don't know the value. We can handle SUBREG in
11143 some cases. */
11144 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11145 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11146 else if (GET_CODE (setter) == SET
11147 && GET_CODE (SET_DEST (setter)) == SUBREG
11148 && SUBREG_REG (SET_DEST (setter)) == dest
90bf8081 11149 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
230d793d 11150 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
11151 record_value_for_reg (dest, record_dead_insn,
11152 gen_lowpart_for_combine (GET_MODE (dest),
11153 SET_SRC (setter)));
230d793d 11154 else
5f4f0e22 11155 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
11156 }
11157 else if (GET_CODE (dest) == MEM
11158 /* Ignore pushes, they clobber nothing. */
11159 && ! push_operand (dest, GET_MODE (dest)))
11160 mem_last_set = INSN_CUID (record_dead_insn);
11161}
11162
11163/* Update the records of when each REG was most recently set or killed
11164 for the things done by INSN. This is the last thing done in processing
11165 INSN in the combiner loop.
11166
ef026f91
RS
11167 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11168 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11169 and also the similar information mem_last_set (which insn most recently
11170 modified memory) and last_call_cuid (which insn was the most recent
11171 subroutine call). */
230d793d
RS
11172
11173static void
11174record_dead_and_set_regs (insn)
11175 rtx insn;
11176{
11177 register rtx link;
770ae6cc 11178 unsigned int i;
55310dad 11179
230d793d
RS
11180 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11181 {
dbc131f3
RK
11182 if (REG_NOTE_KIND (link) == REG_DEAD
11183 && GET_CODE (XEXP (link, 0)) == REG)
11184 {
770ae6cc
RK
11185 unsigned int regno = REGNO (XEXP (link, 0));
11186 unsigned int endregno
dbc131f3
RK
11187 = regno + (regno < FIRST_PSEUDO_REGISTER
11188 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11189 : 1);
dbc131f3
RK
11190
11191 for (i = regno; i < endregno; i++)
11192 reg_last_death[i] = insn;
11193 }
230d793d 11194 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 11195 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
11196 }
11197
11198 if (GET_CODE (insn) == CALL_INSN)
55310dad
RK
11199 {
11200 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11201 if (call_used_regs[i])
11202 {
11203 reg_last_set_value[i] = 0;
ef026f91
RS
11204 reg_last_set_mode[i] = 0;
11205 reg_last_set_nonzero_bits[i] = 0;
11206 reg_last_set_sign_bit_copies[i] = 0;
55310dad
RK
11207 reg_last_death[i] = 0;
11208 }
11209
11210 last_call_cuid = mem_last_set = INSN_CUID (insn);
11211 }
230d793d 11212
84832317 11213 note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
230d793d 11214}
732f2ac9 11215
732f2ac9
JJ
11216/* If a SUBREG has the promoted bit set, it is in fact a property of the
11217 register present in the SUBREG, so for each such SUBREG go back and
11218 adjust nonzero and sign bit information of the registers that are
11219 known to have some zero/sign bits set.
11220
11221 This is needed because when combine blows the SUBREGs away, the
11222 information on zero/sign bits is lost and further combines can be
11223 missed because of that. */
11224
11225static void
11226record_promoted_value (insn, subreg)
663522cb
KH
11227 rtx insn;
11228 rtx subreg;
732f2ac9 11229{
4a71b24f 11230 rtx links, set;
770ae6cc 11231 unsigned int regno = REGNO (SUBREG_REG (subreg));
732f2ac9
JJ
11232 enum machine_mode mode = GET_MODE (subreg);
11233
25af74a0 11234 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
732f2ac9
JJ
11235 return;
11236
663522cb 11237 for (links = LOG_LINKS (insn); links;)
732f2ac9
JJ
11238 {
11239 insn = XEXP (links, 0);
11240 set = single_set (insn);
11241
11242 if (! set || GET_CODE (SET_DEST (set)) != REG
11243 || REGNO (SET_DEST (set)) != regno
11244 || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11245 {
11246 links = XEXP (links, 1);
11247 continue;
11248 }
11249
663522cb
KH
11250 if (reg_last_set[regno] == insn)
11251 {
732f2ac9 11252 if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
663522cb
KH
11253 reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11254 }
732f2ac9
JJ
11255
11256 if (GET_CODE (SET_SRC (set)) == REG)
11257 {
11258 regno = REGNO (SET_SRC (set));
11259 links = LOG_LINKS (insn);
11260 }
11261 else
11262 break;
11263 }
11264}
11265
11266/* Scan X for promoted SUBREGs. For each one found,
11267 note what it implies to the registers used in it. */
11268
11269static void
11270check_promoted_subreg (insn, x)
663522cb
KH
11271 rtx insn;
11272 rtx x;
732f2ac9
JJ
11273{
11274 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11275 && GET_CODE (SUBREG_REG (x)) == REG)
11276 record_promoted_value (insn, x);
11277 else
11278 {
11279 const char *format = GET_RTX_FORMAT (GET_CODE (x));
11280 int i, j;
11281
11282 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
663522cb 11283 switch (format[i])
732f2ac9
JJ
11284 {
11285 case 'e':
11286 check_promoted_subreg (insn, XEXP (x, i));
11287 break;
11288 case 'V':
11289 case 'E':
11290 if (XVEC (x, i) != 0)
11291 for (j = 0; j < XVECLEN (x, i); j++)
11292 check_promoted_subreg (insn, XVECEXP (x, i, j));
11293 break;
11294 }
11295 }
11296}
230d793d
RS
11297\f
11298/* Utility routine for the following function. Verify that all the registers
11299 mentioned in *LOC are valid when *LOC was part of a value set when
11300 label_tick == TICK. Return 0 if some are not.
11301
11302 If REPLACE is non-zero, replace the invalid reference with
11303 (clobber (const_int 0)) and return 1. This replacement is useful because
11304 we often can get useful information about the form of a value (e.g., if
11305 it was produced by a shift that always produces -1 or 0) even though
11306 we don't know exactly what registers it was produced from. */
11307
11308static int
9a893315 11309get_last_value_validate (loc, insn, tick, replace)
230d793d 11310 rtx *loc;
9a893315 11311 rtx insn;
230d793d
RS
11312 int tick;
11313 int replace;
11314{
11315 rtx x = *loc;
6f7d635c 11316 const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
230d793d
RS
11317 int len = GET_RTX_LENGTH (GET_CODE (x));
11318 int i;
11319
11320 if (GET_CODE (x) == REG)
11321 {
770ae6cc
RK
11322 unsigned int regno = REGNO (x);
11323 unsigned int endregno
11324 = regno + (regno < FIRST_PSEUDO_REGISTER
11325 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11326 unsigned int j;
230d793d
RS
11327
11328 for (j = regno; j < endregno; j++)
11329 if (reg_last_set_invalid[j]
57cf50a4
GRK
11330 /* If this is a pseudo-register that was only set once and not
11331 live at the beginning of the function, it is always valid. */
663522cb 11332 || (! (regno >= FIRST_PSEUDO_REGISTER
57cf50a4 11333 && REG_N_SETS (regno) == 1
770ae6cc
RK
11334 && (! REGNO_REG_SET_P
11335 (BASIC_BLOCK (0)->global_live_at_start, regno)))
230d793d
RS
11336 && reg_last_set_label[j] > tick))
11337 {
11338 if (replace)
38a448ca 11339 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
11340 return replace;
11341 }
11342
11343 return 1;
11344 }
9a893315
JW
11345 /* If this is a memory reference, make sure that there were
11346 no stores after it that might have clobbered the value. We don't
11347 have alias info, so we assume any store invalidates it. */
11348 else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11349 && INSN_CUID (insn) <= mem_last_set)
11350 {
11351 if (replace)
38a448ca 11352 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9a893315
JW
11353 return replace;
11354 }
230d793d
RS
11355
11356 for (i = 0; i < len; i++)
11357 if ((fmt[i] == 'e'
9a893315 11358 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
230d793d
RS
11359 /* Don't bother with these. They shouldn't occur anyway. */
11360 || fmt[i] == 'E')
11361 return 0;
11362
11363 /* If we haven't found a reason for it to be invalid, it is valid. */
11364 return 1;
11365}
11366
11367/* Get the last value assigned to X, if known. Some registers
11368 in the value may be replaced with (clobber (const_int 0)) if their value
11369 is known longer known reliably. */
11370
11371static rtx
11372get_last_value (x)
11373 rtx x;
11374{
770ae6cc 11375 unsigned int regno;
230d793d
RS
11376 rtx value;
11377
11378 /* If this is a non-paradoxical SUBREG, get the value of its operand and
11379 then convert it to the desired mode. If this is a paradoxical SUBREG,
0f41302f 11380 we cannot predict what values the "extra" bits might have. */
230d793d
RS
11381 if (GET_CODE (x) == SUBREG
11382 && subreg_lowpart_p (x)
11383 && (GET_MODE_SIZE (GET_MODE (x))
11384 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11385 && (value = get_last_value (SUBREG_REG (x))) != 0)
11386 return gen_lowpart_for_combine (GET_MODE (x), value);
11387
11388 if (GET_CODE (x) != REG)
11389 return 0;
11390
11391 regno = REGNO (x);
11392 value = reg_last_set_value[regno];
11393
57cf50a4
GRK
11394 /* If we don't have a value, or if it isn't for this basic block and
11395 it's either a hard register, set more than once, or it's a live
663522cb 11396 at the beginning of the function, return 0.
57cf50a4 11397
663522cb 11398 Because if it's not live at the beginnning of the function then the reg
57cf50a4
GRK
11399 is always set before being used (is never used without being set).
11400 And, if it's set only once, and it's always set before use, then all
11401 uses must have the same last value, even if it's not from this basic
11402 block. */
230d793d
RS
11403
11404 if (value == 0
57cf50a4
GRK
11405 || (reg_last_set_label[regno] != label_tick
11406 && (regno < FIRST_PSEUDO_REGISTER
11407 || REG_N_SETS (regno) != 1
770ae6cc
RK
11408 || (REGNO_REG_SET_P
11409 (BASIC_BLOCK (0)->global_live_at_start, regno)))))
230d793d
RS
11410 return 0;
11411
4255220d 11412 /* If the value was set in a later insn than the ones we are processing,
ca4cd906 11413 we can't use it even if the register was only set once. */
bcd49eb7 11414 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
ca4cd906 11415 return 0;
d0ab8cd3
RK
11416
11417 /* If the value has all its registers valid, return it. */
9a893315
JW
11418 if (get_last_value_validate (&value, reg_last_set[regno],
11419 reg_last_set_label[regno], 0))
230d793d
RS
11420 return value;
11421
11422 /* Otherwise, make a copy and replace any invalid register with
11423 (clobber (const_int 0)). If that fails for some reason, return 0. */
11424
11425 value = copy_rtx (value);
9a893315
JW
11426 if (get_last_value_validate (&value, reg_last_set[regno],
11427 reg_last_set_label[regno], 1))
230d793d
RS
11428 return value;
11429
11430 return 0;
11431}
11432\f
11433/* Return nonzero if expression X refers to a REG or to memory
11434 that is set in an instruction more recent than FROM_CUID. */
11435
11436static int
11437use_crosses_set_p (x, from_cuid)
11438 register rtx x;
11439 int from_cuid;
11440{
6f7d635c 11441 register const char *fmt;
230d793d
RS
11442 register int i;
11443 register enum rtx_code code = GET_CODE (x);
11444
11445 if (code == REG)
11446 {
770ae6cc
RK
11447 unsigned int regno = REGNO (x);
11448 unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
663522cb
KH
11449 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11450
230d793d
RS
11451#ifdef PUSH_ROUNDING
11452 /* Don't allow uses of the stack pointer to be moved,
11453 because we don't know whether the move crosses a push insn. */
f73ad30e 11454 if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
230d793d
RS
11455 return 1;
11456#endif
770ae6cc 11457 for (; regno < endreg; regno++)
e28f5732
RK
11458 if (reg_last_set[regno]
11459 && INSN_CUID (reg_last_set[regno]) > from_cuid)
11460 return 1;
11461 return 0;
230d793d
RS
11462 }
11463
11464 if (code == MEM && mem_last_set > from_cuid)
11465 return 1;
11466
11467 fmt = GET_RTX_FORMAT (code);
11468
11469 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11470 {
11471 if (fmt[i] == 'E')
11472 {
11473 register int j;
11474 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11475 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11476 return 1;
11477 }
11478 else if (fmt[i] == 'e'
11479 && use_crosses_set_p (XEXP (x, i), from_cuid))
11480 return 1;
11481 }
11482 return 0;
11483}
11484\f
11485/* Define three variables used for communication between the following
11486 routines. */
11487
770ae6cc 11488static unsigned int reg_dead_regno, reg_dead_endregno;
230d793d
RS
11489static int reg_dead_flag;
11490
11491/* Function called via note_stores from reg_dead_at_p.
11492
663522cb 11493 If DEST is within [reg_dead_regno, reg_dead_endregno), set
230d793d
RS
11494 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
11495
11496static void
84832317 11497reg_dead_at_p_1 (dest, x, data)
230d793d
RS
11498 rtx dest;
11499 rtx x;
84832317 11500 void *data ATTRIBUTE_UNUSED;
230d793d 11501{
770ae6cc 11502 unsigned int regno, endregno;
230d793d
RS
11503
11504 if (GET_CODE (dest) != REG)
11505 return;
11506
11507 regno = REGNO (dest);
663522cb 11508 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
230d793d
RS
11509 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
11510
11511 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11512 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11513}
11514
11515/* Return non-zero if REG is known to be dead at INSN.
11516
11517 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
11518 referencing REG, it is dead. If we hit a SET referencing REG, it is
11519 live. Otherwise, see if it is live or dead at the start of the basic
6e25d159
RK
11520 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
11521 must be assumed to be always live. */
230d793d
RS
11522
11523static int
11524reg_dead_at_p (reg, insn)
11525 rtx reg;
11526 rtx insn;
11527{
770ae6cc
RK
11528 int block;
11529 unsigned int i;
230d793d
RS
11530
11531 /* Set variables for reg_dead_at_p_1. */
11532 reg_dead_regno = REGNO (reg);
11533 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11534 ? HARD_REGNO_NREGS (reg_dead_regno,
11535 GET_MODE (reg))
11536 : 1);
11537
11538 reg_dead_flag = 0;
11539
6e25d159
RK
11540 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
11541 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11542 {
11543 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11544 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
11545 return 0;
11546 }
11547
230d793d
RS
11548 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11549 beginning of function. */
60715d0b 11550 for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
230d793d
RS
11551 insn = prev_nonnote_insn (insn))
11552 {
84832317 11553 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
230d793d
RS
11554 if (reg_dead_flag)
11555 return reg_dead_flag == 1 ? 1 : 0;
11556
11557 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11558 return 1;
11559 }
11560
11561 /* Get the basic block number that we were in. */
11562 if (insn == 0)
11563 block = 0;
11564 else
11565 {
11566 for (block = 0; block < n_basic_blocks; block++)
3b413743 11567 if (insn == BLOCK_HEAD (block))
230d793d
RS
11568 break;
11569
11570 if (block == n_basic_blocks)
11571 return 0;
11572 }
11573
11574 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
e881bb1b 11575 if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
230d793d
RS
11576 return 0;
11577
11578 return 1;
11579}
6e25d159
RK
11580\f
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.

   Every hard register found (other than the fixed stack/frame/arg
   pointers) has its bit set in the global HARD_REG_SET
   `newpat_used_regs'.  X may be any rtx: insns pass their PATTERN.  */

static void
mark_used_regs_combine (x)
     rtx x;
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    /* Leaf rtx codes that can never mention a hard register.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  A clobbered REG itself is not a "use".  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int endregno, r;

	  /* None of this applies to the stack, frame or arg pointers;
	     those are always live and need no tracking here.  */
	  if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  for (r = regno; r < endregno; r++)
	    SET_HARD_REG_BIT (newpat_used_regs, r);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  The destination register itself is being written,
	   not used, so we deliberately skip past any wrappers around it.  */
	register rtx testreg = SET_DEST (x);

	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (GET_CODE (testreg) == MEM)
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    register int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}
230d793d
RS
11684\f
11685/* Remove register number REGNO from the dead registers list of INSN.
11686
11687 Return the note used to record the death, if there was one. */
11688
11689rtx
11690remove_death (regno, insn)
770ae6cc 11691 unsigned int regno;
230d793d
RS
11692 rtx insn;
11693{
11694 register rtx note = find_regno_note (insn, REG_DEAD, regno);
11695
11696 if (note)
1a26b032 11697 {
b1f21e0a 11698 REG_N_DEATHS (regno)--;
1a26b032
RK
11699 remove_note (insn, note);
11700 }
230d793d
RS
11701
11702 return note;
11703}
11704
/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
     rtx x;
     rtx maybe_kill_insn;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register const char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && ! reg_referenced_p (x, maybe_kill_insn))
	return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);

      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  > GET_MODE_SIZE (GET_MODE (x))))
	    {
	      /* NOTE covers a wider mode than X: re-emit REG_DEAD notes on
		 WHERE_DEAD for each covered hard reg outside [regno,ourend).  */
	      unsigned int deadregno = REGNO (XEXP (note, 0));
	      unsigned int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      unsigned int ourend
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      unsigned int i;

	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx_EXPR_LIST (REG_DEAD,
					 gen_rtx_REG (reg_raw_mode[i], i),
					 REG_NOTES (where_dead));
	    }

	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
			    < GET_MODE_SIZE (GET_MODE (x)))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      unsigned int ourend
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      unsigned int i, offset;
	      rtx oldnotes = 0;

	      /* Start past the part already covered by NOTE (if any).  */
	      if (note)
		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
	      else
		offset = 1;

	      for (i = regno + offset; i < ourend; i++)
		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      /* Reuse the old note: splice it onto the head of *PNOTES.  */
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

	  REG_N_DEATHS (regno)++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some
	 piece of every register in the expression is used by
	 this insn, so remove any old death.  */
      /* ??? So why do we test for equality of the sizes?  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  /* Default: recursively scan all operands of X.  */
  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
11884\f
a7c99304
RK
11885/* Return 1 if X is the target of a bit-field assignment in BODY, the
11886 pattern of an insn. X must be a REG. */
230d793d
RS
11887
11888static int
a7c99304
RK
11889reg_bitfield_target_p (x, body)
11890 rtx x;
230d793d
RS
11891 rtx body;
11892{
11893 int i;
11894
11895 if (GET_CODE (body) == SET)
a7c99304
RK
11896 {
11897 rtx dest = SET_DEST (body);
11898 rtx target;
770ae6cc 11899 unsigned int regno, tregno, endregno, endtregno;
a7c99304
RK
11900
11901 if (GET_CODE (dest) == ZERO_EXTRACT)
11902 target = XEXP (dest, 0);
11903 else if (GET_CODE (dest) == STRICT_LOW_PART)
11904 target = SUBREG_REG (XEXP (dest, 0));
11905 else
11906 return 0;
11907
11908 if (GET_CODE (target) == SUBREG)
11909 target = SUBREG_REG (target);
11910
11911 if (GET_CODE (target) != REG)
11912 return 0;
11913
11914 tregno = REGNO (target), regno = REGNO (x);
11915 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11916 return target == x;
11917
11918 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
11919 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11920
11921 return endregno > tregno && regno < endtregno;
11922 }
230d793d
RS
11923
11924 else if (GET_CODE (body) == PARALLEL)
11925 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 11926 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
11927 return 1;
11928
11929 return 0;
663522cb 11930}
230d793d
RS
11931\f
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      /* PLACE/PLACE2 are the (up to two) insns this note ends up on;
	 if both stay zero the note is dropped.  */
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_BR_PROB:
	case REG_BR_PRED:
	case REG_EXEC_COUNT:
	  /* Doesn't matter much where we put this, as long as it's somewhere.
	     It is preferable to keep these notes on branches, which is most
	     likely to be i3.  */
	  place = i3;
	  break;

	case REG_NON_LOCAL_GOTO:
	  /* Must stay on the jump insn of the combination.  */
	  if (GET_CODE (i3) == JUMP_INSN)
	    place = i3;
	  else if (i2 && GET_CODE (i2) == JUMP_INSN)
	    place = i2;
	  else
	    abort ();
	  break;

	case REG_EH_REGION:
	  /* These notes must remain with the call or trapping instruction.  */
	  if (GET_CODE (i3) == CALL_INSN)
	    place = i3;
	  else if (i2 && GET_CODE (i2) == CALL_INSN)
	    place = i2;
	  else if (flag_non_call_exceptions)
	    {
	      if (may_trap_p (i3))
		place = i3;
	      else if (i2 && may_trap_p (i2))
		place = i2;
	      /* ??? Otherwise assume we've combined things such that we
		 can now prove that the instructions can't trap.  Drop the
		 note in this case.  */
	    }
	  else
	    abort ();
	  break;

	case REG_EH_RETHROW:
	case REG_NORETURN:
	  /* These notes must remain with the call.  It should not be
	     possible for both I2 and I3 to be a call.  */
	  if (GET_CODE (i3) == CALL_INSN)
	    place = i3;
	  else if (i2 && GET_CODE (i2) == CALL_INSN)
	    place = i2;
	  else
	    abort ();
	  break;

	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if it is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes from
	     i2 or i1 for register which were both used and clobbered, so
	     we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */

	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (GET_CODE (XEXP (note, 0)) == REG
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (GET_CODE (XEXP (note, 0)) == REG
			 ? find_regno_note (i3, REG_DEAD,
					    REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NOALIAS:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3 in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_LABEL:
	  /* This can show up in several ways -- either directly in the
	     pattern, or hidden off in the constant pool with (or without?)
	     a REG_EQUAL note.  */
	  /* ??? Ignore the without-reg_equal-note problem for now.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
	    place = i3;

	  if (i2
	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_NONNEG:
	case REG_WAS_0:
	  /* These notes say something about the value of a register prior
	     to the execution of an insn.  It is too much trouble to see
	     if the note is still correct in all situations.  It is better
	     to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

	  if (from_insn
	      && GET_CODE (from_insn) == CALL_INSN
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (rtx_equal_p (XEXP (note, 0), elim_i2)
	      || rtx_equal_p (XEXP (note, 0), elim_i1))
	    break;

	  if (place == 0)
	    {
	      /* No obvious place: scan backwards from I3 for a use or set
		 of the register, stopping at the basic-block head.  */
	      basic_block bb = BASIC_BLOCK (this_basic_block);

	      for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
		{
		  if (! INSN_P (tem))
		    {
		      if (tem == bb->head)
			break;
		      continue;
		    }

		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  */
		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);
		      rtx inner_dest = 0;
#ifdef HAVE_cc0
		      rtx cc0_setter = NULL_RTX;
#endif

		      if (set != 0)
			for (inner_dest = SET_DEST (set);
			     (GET_CODE (inner_dest) == STRICT_LOW_PART
			      || GET_CODE (inner_dest) == SUBREG
			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
			     inner_dest = XEXP (inner_dest, 0))
			  ;

		      /* Verify that it was the set, and not a clobber that
			 modified the register.

			 CC0 targets must be careful to maintain setter/user
			 pairs.  If we cannot delete the setter due to side
			 effects, mark the user with an UNUSED note instead
			 of deleting it.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
			  )
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */

			  PATTERN (tem) = pc_rtx;

			  distribute_notes (REG_NOTES (tem), tem, tem,
					    NULL_RTX, NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  PUT_CODE (tem, NOTE);
			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (tem) = 0;

#ifdef HAVE_cc0
			  /* Delete the setter too.  */
			  if (cc0_setter)
			    {
			      PATTERN (cc0_setter) = pc_rtx;

			      distribute_notes (REG_NOTES (cc0_setter),
						cc0_setter, cc0_setter,
						NULL_RTX, NULL_RTX, NULL_RTX);
			      distribute_links (LOG_LINKS (cc0_setter));

			      PUT_CODE (cc0_setter, NOTE);
			      NOTE_LINE_NUMBER (cc0_setter)
				= NOTE_INSN_DELETED;
			      NOTE_SOURCE_FILE (cc0_setter) = 0;
			    }
#endif
			}
		      /* If the register is both set and used here, put the
			 REG_DEAD note here, but place a REG_UNUSED note
			 here too unless there already is one.  */
		      else if (reg_referenced_p (XEXP (note, 0),
						 PATTERN (tem)))
			{
			  place = tem;

			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    REG_NOTES (tem)
			      = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
						   REG_NOTES (tem));
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put one
			     here.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (GET_CODE (tem) == CALL_INSN
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place to
			 i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn
			  && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }

		  if (tem == bb->head)
		    break;
		}

	      /* We haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit the beginning
		 of the block.  If the existing life info says the reg
		 was dead, there's nothing left to do.  Otherwise, we'll
		 need to do a global life update after combine.  */
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
		  && REGNO_REG_SET_P (bb->global_live_at_start,
				      REGNO (XEXP (note, 0))))
		{
		  SET_BIT (refresh_blocks, this_basic_block);
		  need_refresh = 1;
		}
	    }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.
	     Note that we get here if the register is set at all, not only
	     if it is totally replaced, which is what `dead_or_set_p'
	     checks, so also check for it being set partially.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      unsigned int regno = REGNO (XEXP (note, 0));

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.]  */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must arrange for an appropriate REG_DEAD
		 note to be added for it.  However, we can't just emit a USE
		 and tag the note to it, since the register might actually
		 be dead; so we recurse, and the recursive call then finds
		 the previous insn that used this register.  */

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  unsigned int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  unsigned int i;

		  for (i = regno; i < endregno; i++)
		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			 && ! find_regno_fusage (place, USE, i))
			|| dead_or_set_regno_p (place, i))
		      all_used = 0;

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 not already dead or set.  */

		      for (i = regno; i < endregno;
			   i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
			{
			  rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
			  basic_block bb = BASIC_BLOCK (this_basic_block);

			  if (! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    {
			      rtx new_note
				= gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);

			      distribute_notes (new_note, place, place,
						NULL_RTX, NULL_RTX, NULL_RTX);
			    }
			  else if (! refers_to_regno_p (i, i + 1,
							PATTERN (place), 0)
				   && ! find_regno_fusage (place, USE, i))
			    /* The piece is unused here: scan backwards for
			       the insn where it died or was set, and mark
			       it REG_UNUSED there.  */
			    for (tem = PREV_INSN (place); ;
				 tem = PREV_INSN (tem))
			      {
				if (! INSN_P (tem))
				  {
				    if (tem == bb->head)
				      {
					SET_BIT (refresh_blocks,
						 this_basic_block);
					need_refresh = 1;
					break;
				      }
				    continue;
				  }
				if (dead_or_set_p (tem, piece)
				    || reg_bitfield_target_p (piece,
							      PATTERN (tem)))
				  {
				    REG_NOTES (tem)
				      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
							   REG_NOTES (tem));
				    break;
				  }
			      }

			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      if (place)
	{
	  /* Attach the note to PLACE's note list.  */
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	/* The note is dropped entirely: account for the lost death.  */
	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

	  /* PLACE2 gets a fresh copy of the note.  */
	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
					       REG_NOTE_KIND (note),
					       XEXP (note, 0),
					       REG_NOTES (place2));
	}
    }
}
12475\f
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link to
	 have been changed by combine.  The only potential of this is if we
	 replace I3, I2, and I1 by I3 and I2.  But in that case the
	 destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;

      /* Strip wrappers to find the register the linked insn sets.  */
      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && (this_basic_block == n_basic_blocks - 1
		     || BLOCK_HEAD (this_basic_block + 1) != insn));
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    /* Stop at a use or set of REG; only a genuine use (not just
	       a set) receives the link.  */
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }
	else if (GET_CODE (insn) == CALL_INSN
		 && find_reg_fusage (insn, USE, reg))
	  {
	    /* A call that uses REG via its fusage list counts as a use.  */
	    place = insn;
	    break;
	  }

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;

	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
	      if (added_links_insn == 0
		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
		added_links_insn = place;
	    }
	}
    }
}
12566\f
1427d6d2
RK
12567/* Compute INSN_CUID for INSN, which is an insn made by combine. */
12568
12569static int
12570insn_cuid (insn)
12571 rtx insn;
12572{
12573 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
12574 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
12575 insn = NEXT_INSN (insn);
12576
12577 if (INSN_UID (insn) > max_uid_cuid)
12578 abort ();
12579
12580 return INSN_CUID (insn);
12581}
12582\f
230d793d
RS
/* Print this pass's per-function combiner statistics (attempts, merges,
   substitutions needing new rtl, successes) to FILE, via fnotice so the
   message is translatable.  */

void
dump_combine_stats (file)
     FILE *file;
{
  fnotice
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}
12592
/* Print the accumulated whole-compilation combiner totals (attempts,
   merges, substitutions needing new rtl, successes) to FILE, via fnotice
   so the message is translatable.  */

void
dump_combine_total_stats (file)
     FILE *file;
{
  fnotice
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}
This page took 3.299368 seconds and 5 git commands to generate.