/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0.  None are needed,
   because the insn that sets CC0 is always immediately before the
   insn that tests it.  So we always regard a branch insn as having
   a logical link to the preceding insn.  The same is true for an
   insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
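
/* For example (an illustrative sketch, not taken from any particular
   machine description), given two insns joined by a logical link

	(set (reg:SI 116) (plus:SI (reg:SI 115) (const_int 4)))
	(set (reg:SI 117) (mult:SI (reg:SI 116) (reg:SI 118)))

   and assuming (reg:SI 116) dies in the second insn, combine substitutes
   the first insn's SET_SRC into the second, producing

	(set (reg:SI 117) (mult:SI (plus:SI (reg:SI 115) (const_int 4))
				   (reg:SI 118)))

   The result is installed only if recog_for_combine accepts it as a
   valid insn for the target.  */
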
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"

#ifndef ACCUMULATE_OUTGOING_ARGS
#define ACCUMULATE_OUTGOING_ARGS 0
#endif

/* Supply a default definition for PUSH_ARGS.  */
#ifndef PUSH_ARGS
#ifdef PUSH_ROUNDING
#define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
#else
#define PUSH_ARGS 0
#endif
#endif

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
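
/* Illustrative note: uids need not be ordered, since later passes may
   create insns out of sequence; a stream might carry uids 40, 12, 57.
   The corresponding cuids would still be 1, 2, 3, so comparing cuids
   reliably answers which insn comes first.  */
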
/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT)(val) << (BITS_PER_WORD - 1)) << 1)
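
/* For example, on a host where HOST_BITS_PER_WIDE_INT == BITS_PER_WORD
   == 64, evaluating (val << 64) directly is undefined behavior in C;
   splitting it as ((val << 63) << 1) keeps each shift count below the
   type width and yields the intended result of zero.  */
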
/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;
static int need_refresh;

/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
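
/* As a concrete illustration (register numbers are hypothetical): if the
   recorded last value of (reg:SI 65) is (zero_extend:SI (mem:QI ...)),
   then the upper 24 bits of the register are known to be zero, so a later
   (and:SI (reg:SI 65) (const_int 255)) is redundant and can be replaced
   by (reg:SI 65) itself.  */
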
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static unsigned char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; unsigned int i;} old_contents;
  union {rtx *r; unsigned int *i;} where;
};

/* Record a bunch of changes to be undone, chained through the `next'
   fields of struct undo.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.

   previous_undos is the value of undobuf.undos when we started processing
   this substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  struct undo *previous_undos;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST			PARAMS ((rtx *, rtx));
static void do_SUBST_INT		PARAMS ((unsigned int *,
						 unsigned int));
static void init_reg_last_arrays	PARAMS ((void));
static void setup_incoming_promotions	PARAMS ((void));
static void set_nonzero_bits_and_sign_copies  PARAMS ((rtx, rtx, void *));
static int cant_combine_insn_p		PARAMS ((rtx));
static int can_combine_p		PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p		PARAMS ((rtx));
static int combinable_i3pat		PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
static int contains_muldiv		PARAMS ((rtx));
static rtx try_combine			PARAMS ((rtx, rtx, rtx, int *));
static void undo_all			PARAMS ((void));
static void undo_commit			PARAMS ((void));
static rtx *find_split_point		PARAMS ((rtx *, rtx));
static rtx subst			PARAMS ((rtx, rtx, rtx, int, int));
static rtx combine_simplify_rtx		PARAMS ((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else	PARAMS ((rtx));
static rtx simplify_set			PARAMS ((rtx));
static rtx simplify_logical		PARAMS ((rtx, int));
static rtx expand_compound_operation	PARAMS ((rtx));
static rtx expand_field_assignment	PARAMS ((rtx));
static rtx make_extraction		PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
						 rtx, unsigned HOST_WIDE_INT, int,
						 int, int));
static rtx extract_left_shift		PARAMS ((rtx, int));
static rtx make_compound_operation	PARAMS ((rtx, enum rtx_code));
static int get_pos_from_mask		PARAMS ((unsigned HOST_WIDE_INT,
						 unsigned HOST_WIDE_INT *));
static rtx force_to_mode		PARAMS ((rtx, enum machine_mode,
						 unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond		PARAMS ((rtx, rtx *, rtx *));
static rtx known_cond			PARAMS ((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
static rtx make_field_assignment	PARAMS ((rtx));
static rtx apply_distributive_law	PARAMS ((rtx));
static rtx simplify_and_const_int	PARAMS ((rtx, enum machine_mode, rtx,
						 unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PARAMS ((rtx, enum machine_mode));
static unsigned int num_sign_bit_copies  PARAMS ((rtx, enum machine_mode));
static int merge_outer_ops		PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
						 enum rtx_code, HOST_WIDE_INT,
						 enum machine_mode, int *));
static rtx simplify_shift_const		PARAMS ((rtx, enum rtx_code, enum machine_mode,
						 rtx, int));
static int recog_for_combine		PARAMS ((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine	PARAMS ((enum machine_mode, rtx));
static rtx gen_rtx_combine PARAMS ((enum rtx_code code, enum machine_mode mode,
				    ...));
static rtx gen_binary			PARAMS ((enum rtx_code, enum machine_mode,
						 rtx, rtx));
static rtx gen_unary			PARAMS ((enum rtx_code, enum machine_mode,
						 enum machine_mode, rtx));
static enum rtx_code simplify_comparison  PARAMS ((enum rtx_code, rtx *, rtx *));
static void update_table_tick		PARAMS ((rtx));
static void record_value_for_reg	PARAMS ((rtx, rtx, rtx));
static void check_promoted_subreg	PARAMS ((rtx, rtx));
static void record_dead_and_set_regs_1	PARAMS ((rtx, rtx, void *));
static void record_dead_and_set_regs	PARAMS ((rtx));
static int get_last_value_validate	PARAMS ((rtx *, rtx, int, int));
static rtx get_last_value		PARAMS ((rtx));
static int use_crosses_set_p		PARAMS ((rtx, int));
static void reg_dead_at_p_1		PARAMS ((rtx, rtx, void *));
static int reg_dead_at_p		PARAMS ((rtx, rtx));
static void move_deaths			PARAMS ((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p	PARAMS ((rtx, rtx));
static void distribute_notes		PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links		PARAMS ((rtx));
static void mark_used_regs_combine	PARAMS ((rtx));
static int insn_cuid			PARAMS ((rtx));
static void record_promoted_value	PARAMS ((rtx, rtx));
static rtx reversed_comparison		PARAMS ((rtx, enum machine_mode, rtx, rtx));
static enum rtx_code combine_reversed_comparison_code PARAMS ((rtx));

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (into, newval)
     rtx *into, newval;
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL) do_SUBST(&(INTO), (NEWVAL))
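
/* Illustrative usage (not a line from this file): a simplification step
   might do

     SUBST (SET_SRC (x), plus_constant (y, 4));

   which records the old SET_SRC of X on undobuf.undos before installing
   the new value; if the combined insn is ultimately rejected, undo_all
   walks that list and restores each *where from old_contents.  */
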
/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (into, newval)
     unsigned int *into, newval;
{
  struct undo *buf;
  unsigned int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL))
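
/* The caveat above exists because CONST_INT rtxs are shared objects;
   modifying one in place would silently change every expression that
   references the same object, and no undo entry could repair the other
   sharers.  Hence SUBST_INT is reserved for plain integer fields within
   an insn, never for the contents of a CONST_INT.  */
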
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return non-zero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (f, nregs)
     rtx f;
     unsigned int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
  reg_sign_bit_copies
    = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (refresh_blocks);
  need_refresh = 0;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && BLOCK_HEAD (this_basic_block + 1) == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (INSN_P (insn))
	{
	  /* See if we know about function return values before this
	     insn based upon SUBREG flags.  */
	  check_promoted_subreg (insn, PATTERN (insn));

	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0),
				     NULL_RTX, &new_direct_jump_p)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    {
	      rtx link = XEXP (links, 0);

	      /* If the linked insn has been replaced by a note, then there
		 is no point in pursuing this chain any further.  */
	      if (GET_CODE (link) == NOTE)
		break;

	      for (nextlinks = LOG_LINKS (link);
		   nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0),
					prev, &new_direct_jump_p)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0),
				       &new_direct_jump_p)) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  if (need_refresh)
    {
      compute_bb_for_insn (get_max_uid ());
      update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
			PROP_DEATH_NOTES);
    }

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  unsigned int nregs = combine_max_regno;

  memset ((char *) reg_last_death, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_value, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_table_tick, 0, nregs * sizeof (int));
  memset ((char *) reg_last_set_label, 0, nregs * sizeof (int));
  memset (reg_last_set_invalid, 0, nregs * sizeof (char));
  memset ((char *) reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
  memset ((char *) reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
  memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}
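
/* The value recorded above is deliberately opaque: for example, for an
   unsigned QImode argument promoted to SImode, it has the shape

	(zero_extend:SI (clobber:QI (const_int 0)))

   i.e. "the zero extension of some unknown QImode value".  That is enough
   for nonzero_bits and num_sign_bit_copies to exploit the promotion
   without pretending to know the actual argument.  */
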
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set, data)
     rtx x;
     rtx set;
     void *data ATTRIBUTE_UNUSED;
{
  unsigned int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the function, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
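
/* As an example of the SHORT_IMMEDIATES_SIGN_EXTEND case above
   (hypothetical values): storing the QImode constant 0x80 into a pseudo
   on such a machine really loads 0x...ffffff80 into the word, so recording
   nonzero bits of just 0x80 would be wrong; the sign-extended constant
   gives the conservative mask.  */
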
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred ATTRIBUTE_UNUSED;
     rtx succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}

/* Check if PAT is an insn - or a part of it - used to set up an
   argument for a function in a hard register.  */

static int
sets_function_arg_p (pat)
     rtx pat;
{
  int i;
  rtx inner_dest;

  switch (GET_CODE (pat))
    {
    case INSN:
      return sets_function_arg_p (PATTERN (pat));

    case PARALLEL:
      for (i = XVECLEN (pat, 0); --i >= 0;)
	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
	  return 1;

      break;

    case SET:
      inner_dest = SET_DEST (pat);
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      return (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));

    default:
      break;
    }

  return 0;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except we can't test
	     all_adjacent; we don't have to, since this instruction will stay
	     in place, thus we are not considering increasing the lifetime of
	     INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}

/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */

static int
contains_muldiv (x)
     rtx x;
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      switch (GET_RTX_CLASS (GET_CODE (x)))
	{
	case 'c':  case '<':  case '2':
	  return contains_muldiv (XEXP (x, 0))
	    || contains_muldiv (XEXP (x, 1));

	case '1':
	  return contains_muldiv (XEXP (x, 0));

	default:
	  return 0;
	}
    }
}
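
/* For example (hypothetical operands), contains_muldiv returns 1 for
   (plus:SI (mult:SI (reg:SI 60) (const_int 3)) (reg:SI 61)) but 0 for
   (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61)), since a
   multiply by the power of two 4 is really just a cheap shift.  */
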
1438\f
c3410241
BS
1439/* Determine whether INSN can be used in a combination. Return nonzero if
1440 not. This is used in try_combine to detect early some cases where we
1441 can't perform combinations. */
1442
1443static int
1444cant_combine_insn_p (insn)
1445 rtx insn;
1446{
1447 rtx set;
1448 rtx src, dest;
1449
1450 /* If this isn't really an insn, we can't do anything.
1451 This can occur when flow deletes an insn that it has merged into an
1452 auto-increment address. */
1453 if (! INSN_P (insn))
1454 return 1;
1455
1456 /* Never combine loads and stores involving hard regs. The register
1457 allocator can usually handle such reg-reg moves by tying. If we allow
1458 the combiner to make substitutions of hard regs, we risk aborting in
1459 reload on machines that have SMALL_REGISTER_CLASSES.
1460 As an exception, we allow combinations involving fixed regs; these are
1461 not available to the register allocator so there's no risk involved. */
1462
1463 set = single_set (insn);
1464 if (! set)
1465 return 0;
1466 src = SET_SRC (set);
1467 dest = SET_DEST (set);
ad334b51
JH
1468 if (GET_CODE (src) == SUBREG)
1469 src = SUBREG_REG (src);
1470 if (GET_CODE (dest) == SUBREG)
1471 dest = SUBREG_REG (dest);
53895717
BS
1472 if (REG_P (src) && REG_P (dest)
1473 && ((REGNO (src) < FIRST_PSEUDO_REGISTER
1474 && ! fixed_regs[REGNO (src)])
1475 || (REGNO (dest) < FIRST_PSEUDO_REGISTER
1476 && ! fixed_regs[REGNO (dest)])))
c3410241 1477 return 1;
53895717 1478
1479 return 0;
1480}
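/* For example (hypothetical RTL; assuming hard register 2 is not a
   fixed register on the target):

     (set (reg:SI 2) (reg:SI 65))   is rejected -- a hard-reg copy is
                                    best left to the register allocator;
     (set (reg:SI 65) (reg:SI 66))  may be combined -- both are pseudos.

   A move involving only fixed hard registers would also be allowed.  */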
1481
1482/* Try to combine the insns I1 and I2 into I3.
1483 Here I1 and I2 appear earlier than I3.
1484 I1 can be zero; then we combine just I2 into I3.
663522cb 1485
1486 It we are combining three insns and the resulting insn is not recognized,
1487 try splitting it into two insns. If that happens, I2 and I3 are retained
1488 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1489 are pseudo-deleted.
1490
663522cb 1491 Return 0 if the combination does not work. Then nothing is changed.
abe6e52f 1492 If we did the combination, return the insn at which combine should
1493 resume scanning.
1494
1495 Set NEW_DIRECT_JUMP_P to a non-zero value if try_combine creates a
1496 new direct jump instruction. */
1497
1498static rtx
44a76fc8 1499try_combine (i3, i2, i1, new_direct_jump_p)
230d793d 1500 register rtx i3, i2, i1;
44a76fc8 1501 register int *new_direct_jump_p;
230d793d 1502{
02359929 1503 /* New patterns for I3 and I2, respectively. */
1504 rtx newpat, newi2pat = 0;
1505 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1506 int added_sets_1, added_sets_2;
1507 /* Total number of SETs to put into I3. */
1508 int total_sets;
1509 /* Nonzero if I2's body now appears in I3. */
1510 int i2_is_used;
1511 /* INSN_CODEs for new I3, new I2, and user of condition code. */
6a651371 1512 int insn_code_number, i2_code_number = 0, other_code_number = 0;
1513 /* Contains I3 if the destination of I3 is used in its source, which means
1514 that the old life of I3 is being killed. If that usage is placed into
1515 I2 and not in I3, a REG_DEAD note must be made. */
1516 rtx i3dest_killed = 0;
1517 /* SET_DEST and SET_SRC of I2 and I1. */
1518 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1519 /* PATTERN (I2), or a copy of it in certain cases. */
1520 rtx i2pat;
1521 /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC. */
c4e861e8 1522 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1523 int i1_feeds_i3 = 0;
1524 /* Notes that must be added to REG_NOTES in I3 and I2. */
1525 rtx new_i3_notes, new_i2_notes;
1526 /* Nonzero if we substituted I3 into I2 instead of the normal case. */
1527 int i3_subst_into_i2 = 0;
1528 /* Nonzero if I1, I2 or I3 contains a MULT operation. */
1529 int have_mult = 0;
1530
1531 int maxreg;
1532 rtx temp;
1533 register rtx link;
1534 int i;
1535
1536 /* Exit early if one of the insns involved can't be used for
1537 combinations. */
1538 if (cant_combine_insn_p (i3)
1539 || cant_combine_insn_p (i2)
1540 || (i1 && cant_combine_insn_p (i1))
1541 /* We also can't do anything if I3 has a
1542 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1543 libcall. */
1544#if 0
1545 /* ??? This gives worse code, and appears to be unnecessary, since no
1546 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1547 || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1548#endif
663522cb 1549 )
1550 return 0;
1551
1552 combine_attempts++;
1553 undobuf.other_insn = 0;
1554
1555 /* Reset the hard register usage information. */
1556 CLEAR_HARD_REG_SET (newpat_used_regs);
1557
1558 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1559 code below, set I1 to be the earlier of the two insns. */
1560 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1561 temp = i1, i1 = i2, i2 = temp;
1562
abe6e52f 1563 added_links_insn = 0;
137e889e 1564
230d793d 1565 /* First check for one important special-case that the code below will
c7be4f66 1566 not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
1567 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1568 we may be able to replace that destination with the destination of I3.
1569 This occurs in the common code where we compute both a quotient and
1570 remainder into a structure, in which case we want to do the computation
1571 directly into the structure to avoid register-register copies.
1572
1573 Note that this case handles both multiple sets in I2 and also
1574 cases where I2 has a number of CLOBBER or PARALLELs.
1575
1576 We make very conservative checks below and only try to handle the
1577 most common cases of this. For example, we only handle the case
1578 where I2 and I3 are adjacent to avoid making difficult register
1579 usage tests. */
1580
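 /* A sketch of the situation just described (hypothetical RTL):

	I2: (parallel [(set (reg:SI 66) (div:SI (reg:SI 64) (reg:SI 65)))
		       (set (reg:SI 67) (mod:SI (reg:SI 64) (reg:SI 65)))])
	I3: (set (mem:SI (reg:SI 70)) (reg:SI 67))

    Substituting I3's destination into the second SET of I2 yields

	(parallel [(set (reg:SI 66) (div:SI (reg:SI 64) (reg:SI 65)))
		   (set (mem:SI (reg:SI 70)) (mod:SI (reg:SI 64) (reg:SI 65)))])

    which becomes the new pattern and computes the remainder directly
    into the structure.  */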
1581 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1582 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1583 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1584 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1585 && GET_CODE (PATTERN (i2)) == PARALLEL
1586 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1587 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1588 below would need to check what is inside (and reg_overlap_mentioned_p
1589 doesn't support those codes anyway). Don't allow those destinations;
1590 the resulting insn isn't likely to be recognized anyway. */
1591 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1592 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1593 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1594 SET_DEST (PATTERN (i3)))
1595 && next_real_insn (i2) == i3)
1596 {
1597 rtx p2 = PATTERN (i2);
1598
1599 /* Make sure that the destination of I3,
1600 which we are going to substitute into one output of I2,
1601 is not used within another output of I2. We must avoid making this:
1602 (parallel [(set (mem (reg 69)) ...)
1603 (set (reg 69) ...)])
1604 which is not well-defined as to order of actions.
1605 (Besides, reload can't handle output reloads for this.)
1606
1607 The problem can also happen if the dest of I3 is a memory ref,
1608 if another dest in I2 is an indirect memory ref. */
1609 for (i = 0; i < XVECLEN (p2, 0); i++)
1610 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1611 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1612 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1613 SET_DEST (XVECEXP (p2, 0, i))))
1614 break;
230d793d 1615
1616 if (i == XVECLEN (p2, 0))
1617 for (i = 0; i < XVECLEN (p2, 0); i++)
1618 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1619 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1620 && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1621 {
1622 combine_merges++;
230d793d 1623
1624 subst_insn = i3;
1625 subst_low_cuid = INSN_CUID (i2);
230d793d 1626
c4e861e8 1627 added_sets_2 = added_sets_1 = 0;
5089e22e 1628 i2dest = SET_SRC (PATTERN (i3));
230d793d 1629
1630 /* Replace the dest in I2 with our dest and make the resulting
1631 insn the new pattern for I3. Then skip to where we
1632 validate the pattern. Everything was set up above. */
663522cb 1633 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1634 SET_DEST (PATTERN (i3)));
1635
1636 newpat = p2;
176c9e6b 1637 i3_subst_into_i2 = 1;
1638 goto validate_replacement;
1639 }
1640 }
230d793d 1641
1642 /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1643 one of those words to another constant, merge them by making a new
1644 constant. */
1645 if (i1 == 0
1646 && (temp = single_set (i2)) != 0
1647 && (GET_CODE (SET_SRC (temp)) == CONST_INT
1648 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1649 && GET_CODE (SET_DEST (temp)) == REG
1650 && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1651 && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1652 && GET_CODE (PATTERN (i3)) == SET
1653 && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1654 && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1655 && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1656 && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1657 && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1658 {
1659 HOST_WIDE_INT lo, hi;
1660
1661 if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1662 lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1663 else
1664 {
1665 lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1666 hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1667 }
1668
1669 if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1670 {
1671 /* We don't handle the case of the target word being wider
1672 than a host wide int. */
1673 if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
1674 abort ();
1675
42a6ff51 1676 lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
1677 lo |= INTVAL (SET_SRC (PATTERN (i3)));
1678 }
1679 else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
667c1c2c 1680 hi = INTVAL (SET_SRC (PATTERN (i3)));
1681 else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1682 {
1683 int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1684 >> (HOST_BITS_PER_WIDE_INT - 1));
1685
1686 lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1687 (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1688 lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1689 (INTVAL (SET_SRC (PATTERN (i3)))));
1690 if (hi == sign)
1691 hi = lo < 0 ? -1 : 0;
1692 }
1693 else
1694 /* We don't handle the case of the higher word not fitting
1695 entirely in either hi or lo. */
1696 abort ();
1697
1698 combine_merges++;
1699 subst_insn = i3;
1700 subst_low_cuid = INSN_CUID (i2);
1701 added_sets_2 = added_sets_1 = 0;
1702 i2dest = SET_DEST (temp);
1703
1704 SUBST (SET_SRC (temp),
1705 immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1706
1707 newpat = PATTERN (i2);
1708 goto validate_replacement;
1709 }
1710
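 /* For instance (a hypothetical sketch, assuming 32-bit words and a
    64-bit HOST_WIDE_INT):

	I2: (set (reg:DI 65) (const_int 0))
	I3: (set (subreg:SI (reg:DI 65) 0) (const_int 7))

    merge into the single constant load

	I2: (set (reg:DI 65) (const_int 7)).  */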
1711#ifndef HAVE_cc0
1712 /* If we have no I1 and I2 looks like:
1713 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1714 (set Y OP)])
1715 make up a dummy I1 that is
1716 (set Y OP)
1717 and change I2 to be
1718 (set (reg:CC X) (compare:CC Y (const_int 0)))
1719
1720 (We can ignore any trailing CLOBBERs.)
1721
1722 This undoes a previous combination and allows us to match a branch-and-
1723 decrement insn. */
1724
1725 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1726 && XVECLEN (PATTERN (i2), 0) >= 2
1727 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1728 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1729 == MODE_CC)
1730 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1731 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1732 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1733 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1734 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1735 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1736 {
663522cb 1737 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1738 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1739 break;
1740
1741 if (i == 1)
1742 {
1743 /* We make I1 with the same INSN_UID as I2. This gives it
1744 the same INSN_CUID for value tracking. Our fake I1 will
1745 never appear in the insn stream so giving it the same INSN_UID
1746 as I2 will not cause a problem. */
1747
0d9641d1 1748 subst_prev_insn = i1
1749 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1750 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1751 NULL_RTX);
1752
1753 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1754 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1755 SET_DEST (PATTERN (i1)));
1756 }
1757 }
1758#endif
1759
1760 /* Verify that I2 and I1 are valid for combining. */
1761 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1762 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1763 {
1764 undo_all ();
1765 return 0;
1766 }
1767
1768 /* Record whether I2DEST is used in I2SRC and similarly for the other
1769 cases. Knowing this will help in register status updating below. */
1770 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1771 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1772 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1773
916f14f1 1774 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1775 in I2SRC. */
1776 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1777
1778 /* Ensure that I3's pattern can be the destination of combines. */
1779 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1780 i1 && i2dest_in_i1src && i1_feeds_i3,
1781 &i3dest_killed))
1782 {
1783 undo_all ();
1784 return 0;
1785 }
1786
1787 /* See if any of the insns is a MULT operation. Unless one is, we will
1788 reject a combination that is, since it must be slower. Be conservative
1789 here. */
1790 if (GET_CODE (i2src) == MULT
1791 || (i1 != 0 && GET_CODE (i1src) == MULT)
1792 || (GET_CODE (PATTERN (i3)) == SET
1793 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1794 have_mult = 1;
1795
1796 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1797 We used to do this EXCEPT in one case: I3 has a post-inc in an
1798 output operand. However, that exception can give rise to insns like
1799 mov r3,(r3)+
1800 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1801 source was model-dependent. Avoid this sort of thing. */
1802
1803#if 0
1804 if (!(GET_CODE (PATTERN (i3)) == SET
1805 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1806 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1807 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1808 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1809 /* It's not the exception. */
1810#endif
1811#ifdef AUTO_INC_DEC
1812 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1813 if (REG_NOTE_KIND (link) == REG_INC
1814 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1815 || (i1 != 0
1816 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1817 {
1818 undo_all ();
1819 return 0;
1820 }
1821#endif
1822
1823 /* See if the SETs in I1 or I2 need to be kept around in the merged
1824 instruction: whenever the value set there is still needed past I3.
1825 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1826
1827 For the SET in I1, we have two cases: If I1 and I2 independently
1828 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1829 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1830 in I1 needs to be kept around unless I1DEST dies or is set in either
1831 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1832 I1DEST. If so, we know I1 feeds into I2. */
1833
1834 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1835
1836 added_sets_1
1837 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1838 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1839
1840 /* If the set in I2 needs to be kept around, we must make a copy of
1841 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
5089e22e 1842 PATTERN (I2), we are only substituting for the original I1DEST, not into
1843 an already-substituted copy. This also prevents making self-referential
1844 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1845 I2DEST. */
1846
1847 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
38a448ca 1848 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1849 : PATTERN (i2));
1850
1851 if (added_sets_2)
1852 i2pat = copy_rtx (i2pat);
1853
1854 combine_merges++;
1855
1856 /* Substitute in the latest insn for the regs set by the earlier ones. */
1857
1858 maxreg = max_reg_num ();
1859
1860 subst_insn = i3;
1861
1862 /* It is possible that the source of I2 or I1 may be performing an
1863 unneeded operation, such as a ZERO_EXTEND of something that is known
1864 to have the high part zero. Handle that case by letting subst look at
1865 the innermost one of them.
1866
1867 Another way to do this would be to have a function that tries to
1868 simplify a single insn instead of merging two or more insns. We don't
1869 do this because of the potential of infinite loops and because
1870 of the potential extra memory required. However, doing it the way
1871 we are is a bit of a kludge and doesn't catch all cases.
1872
1873 But only do this if -fexpensive-optimizations since it slows things down
1874 and doesn't usually win. */
1875
1876 if (flag_expensive_optimizations)
1877 {
1878 /* Pass pc_rtx so no substitutions are done, just simplifications.
1879 The cases that we are interested in here do not involve the few
1880 cases where is_replaced is checked. */
1881 if (i1)
1882 {
1883 subst_low_cuid = INSN_CUID (i1);
1884 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1885 }
230d793d 1886 else
1887 {
1888 subst_low_cuid = INSN_CUID (i2);
1889 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1890 }
230d793d 1891
241cea85 1892 undobuf.previous_undos = undobuf.undos;
1893 }
1894
1895#ifndef HAVE_cc0
1896 /* Many machines that don't use CC0 have insns that can both perform an
1897 arithmetic operation and set the condition code. These operations will
1898 be represented as a PARALLEL with the first element of the vector
1899 being a COMPARE of an arithmetic operation with the constant zero.
1900 The second element of the vector will set some pseudo to the result
1901 of the same arithmetic operation. If we simplify the COMPARE, we won't
1902 match such a pattern and so will generate an extra insn. Here we test
1903 for this case, where both the comparison and the operation result are
1904 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1905 I2SRC. Later we will make the PARALLEL that contains I2. */
1906
1907 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1908 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1909 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1910 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1911 {
081f5e7e 1912#ifdef EXTRA_CC_MODES
1913 rtx *cc_use;
1914 enum machine_mode compare_mode;
081f5e7e 1915#endif
1916
1917 newpat = PATTERN (i3);
1918 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1919
1920 i2_is_used = 1;
1921
1922#ifdef EXTRA_CC_MODES
1923 /* See if a COMPARE with the operand we substituted in should be done
1924 with the mode that is currently being used. If not, do the same
1925 processing we do in `subst' for a SET; namely, if the destination
1926 is used only once, try to replace it with a register of the proper
1927 mode and also replace the COMPARE. */
1928 if (undobuf.other_insn == 0
1929 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1930 &undobuf.other_insn))
1931 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1932 i2src, const0_rtx))
1933 != GET_MODE (SET_DEST (newpat))))
1934 {
770ae6cc 1935 unsigned int regno = REGNO (SET_DEST (newpat));
38a448ca 1936 rtx new_dest = gen_rtx_REG (compare_mode, regno);
1937
1938 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 1939 || (REG_N_SETS (regno) == 1 && ! added_sets_2
1940 && ! REG_USERVAR_P (SET_DEST (newpat))))
1941 {
1942 if (regno >= FIRST_PSEUDO_REGISTER)
1943 SUBST (regno_reg_rtx[regno], new_dest);
1944
1945 SUBST (SET_DEST (newpat), new_dest);
1946 SUBST (XEXP (*cc_use, 0), new_dest);
1947 SUBST (SET_SRC (newpat),
1948 gen_rtx_combine (COMPARE, compare_mode,
1949 i2src, const0_rtx));
1950 }
1951 else
1952 undobuf.other_insn = 0;
1953 }
663522cb 1954#endif
1955 }
1956 else
1957#endif
1958 {
1959 n_occurrences = 0; /* `subst' counts here */
1960
1961 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1962 need to make a unique copy of I2SRC each time we substitute it
1963 to avoid self-referential rtl. */
1964
d0ab8cd3 1965 subst_low_cuid = INSN_CUID (i2);
1966 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1967 ! i1_feeds_i3 && i1dest_in_i1src);
241cea85 1968 undobuf.previous_undos = undobuf.undos;
1969
1970 /* Record whether i2's body now appears within i3's body. */
1971 i2_is_used = n_occurrences;
1972 }
1973
1974 /* If we already got a failure, don't try to do more. Otherwise,
1975 try to substitute in I1 if we have it. */
1976
1977 if (i1 && GET_CODE (newpat) != CLOBBER)
1978 {
1979 /* Before we can do this substitution, we must redo the test done
1980 above (see detailed comments there) that ensures that I1DEST
0f41302f 1981 isn't mentioned in any SETs in NEWPAT that are field assignments. */
230d793d 1982
1983 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1984 0, NULL_PTR))
1985 {
1986 undo_all ();
1987 return 0;
1988 }
1989
1990 n_occurrences = 0;
d0ab8cd3 1991 subst_low_cuid = INSN_CUID (i1);
230d793d 1992 newpat = subst (newpat, i1dest, i1src, 0, 0);
241cea85 1993 undobuf.previous_undos = undobuf.undos;
1994 }
1995
1996 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1997 to count all the ways that I2SRC and I1SRC can be used. */
5f4f0e22 1998 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1999 && i2_is_used + added_sets_2 > 1)
5f4f0e22 2000 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
2001 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
2002 > 1))
2003 /* Fail if we tried to make a new register (we used to abort, but there's
2004 really no reason to). */
2005 || max_reg_num () != maxreg
2006 /* Fail if we couldn't do something and have a CLOBBER. */
2007 || GET_CODE (newpat) == CLOBBER
2008 /* Fail if this new pattern is a MULT and we didn't have one before
2009 at the outer level. */
2010 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
2011 && ! have_mult))
2012 {
2013 undo_all ();
2014 return 0;
2015 }
2016
2017 /* If the actions of the earlier insns must be kept
2018 in addition to substituting them into the latest one,
2019 we must make a new PARALLEL for the latest insn
2020 to hold the additional SETs. */
2021
2022 if (added_sets_1 || added_sets_2)
2023 {
2024 combine_extras++;
2025
2026 if (GET_CODE (newpat) == PARALLEL)
2027 {
2028 rtvec old = XVEC (newpat, 0);
2029 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
38a448ca 2030 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
59888de2 2031 bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
2032 sizeof (old->elem[0]) * old->num_elem);
2033 }
2034 else
2035 {
2036 rtx old = newpat;
2037 total_sets = 1 + added_sets_1 + added_sets_2;
38a448ca 2038 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2039 XVECEXP (newpat, 0, 0) = old;
2040 }
2041
2042 if (added_sets_1)
2043 XVECEXP (newpat, 0, --total_sets)
2044 = (GET_CODE (PATTERN (i1)) == PARALLEL
38a448ca 2045 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
2046
2047 if (added_sets_2)
2048 {
2049 /* If there is no I1, use I2's body as is. We used to also not do
2050 the subst call below if I2 was substituted into I3,
2051 but that could lose a simplification. */
2052 if (i1 == 0)
2053 XVECEXP (newpat, 0, --total_sets) = i2pat;
2054 else
2055 /* See comment where i2pat is assigned. */
2056 XVECEXP (newpat, 0, --total_sets)
2057 = subst (i2pat, i1dest, i1src, 0, 0);
2058 }
2059 }
2060
2061 /* We come here when we are replacing a destination in I2 with the
2062 destination of I3. */
2063 validate_replacement:
2064
2065 /* Note which hard regs this insn has as inputs. */
2066 mark_used_regs_combine (newpat);
2067
230d793d 2068 /* Is the result of combination a valid instruction? */
8e2f6e35 2069 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2070
2071 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2072 the second SET's destination is a register that is unused. In that case,
2073 we just need the first SET. This can occur when simplifying a divmod
2074 insn. We *must* test for this case here because the code below that
2075 splits two independent SETs doesn't handle this case correctly when it
2076 updates the register status. Also check the case where the first
2077 SET's destination is unused. That would not cause incorrect code, but
2078 does cause an unneeded insn to remain. */
2079
2080 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2081 && XVECLEN (newpat, 0) == 2
2082 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2083 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2084 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
2085 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
2086 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
2087 && asm_noperands (newpat) < 0)
2088 {
2089 newpat = XVECEXP (newpat, 0, 0);
8e2f6e35 2090 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2091 }
2092
2093 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2094 && XVECLEN (newpat, 0) == 2
2095 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2096 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2097 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
2098 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
2099 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
2100 && asm_noperands (newpat) < 0)
2101 {
2102 newpat = XVECEXP (newpat, 0, 1);
8e2f6e35 2103 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2104 }
2105
2106 /* If we were combining three insns and the result is a simple SET
2107 with no ASM_OPERANDS that wasn't recognized, try to split it into two
663522cb 2108 insns. There are two ways to do this. It can be split using a
2109 machine-specific method (like when you have an addition of a large
2110 constant) or by combine in the function find_split_point. */
2111
2112 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2113 && asm_noperands (newpat) < 0)
2114 {
916f14f1 2115 rtx m_split, *split;
42495ca0 2116 rtx ni2dest = i2dest;
2117
2118 /* See if the MD file can split NEWPAT. If it can't, see if letting it
2119 use I2DEST as a scratch register will help. In the latter case,
2120 convert I2DEST to the mode of the source of NEWPAT if we can. */
2121
2122 m_split = split_insns (newpat, i3);
2123
2124 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2125 inputs of NEWPAT. */
2126
2127 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2128 possible to try that as a scratch reg. This would require adding
2129 more code to make it work though. */
2130
2131 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2132 {
2133 /* If I2DEST is a hard register or the only use of a pseudo,
2134 we can change its mode. */
2135 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 2136 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 2137 && GET_CODE (i2dest) == REG
42495ca0 2138 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 2139 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
42495ca0 2140 && ! REG_USERVAR_P (i2dest))))
38a448ca 2141 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2142 REGNO (i2dest));
2143
2144 m_split = split_insns (gen_rtx_PARALLEL
2145 (VOIDmode,
2146 gen_rtvec (2, newpat,
2147 gen_rtx_CLOBBER (VOIDmode,
2148 ni2dest))),
2149 i3);
42495ca0 2150 }
916f14f1 2151
2152 if (m_split && GET_CODE (m_split) != SEQUENCE)
2153 {
2154 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2155 if (insn_code_number >= 0)
2156 newpat = m_split;
2157 }
2158 else if (m_split && GET_CODE (m_split) == SEQUENCE
2159 && XVECLEN (m_split, 0) == 2
2160 && (next_real_insn (i2) == i3
2161 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
2162 INSN_CUID (i2))))
916f14f1 2163 {
1a26b032 2164 rtx i2set, i3set;
d0ab8cd3 2165 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 2166 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 2167
2168 i3set = single_set (XVECEXP (m_split, 0, 1));
2169 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 2170
2171 /* In case we changed the mode of I2DEST, replace it in the
2172 pseudo-register table here. We can't do it above in case this
2173 code doesn't get executed and we do a split the other way. */
2174
2175 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2176 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2177
8e2f6e35 2178 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2179
2180 /* If I2 or I3 has multiple SETs, we won't know how to track
2181 register status, so don't use these insns. If I2's destination
2182 is used between I2 and I3, we also can't use these insns. */
1a26b032 2183
2184 if (i2_code_number >= 0 && i2set && i3set
2185 && (next_real_insn (i2) == i3
2186 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2187 insn_code_number = recog_for_combine (&newi3pat, i3,
2188 &new_i3_notes);
2189 if (insn_code_number >= 0)
2190 newpat = newi3pat;
2191
c767f54b 2192 /* It is possible that both insns now set the destination of I3.
22609cbf 2193 If so, we must show an extra use of it. */
c767f54b 2194
2195 if (insn_code_number >= 0)
2196 {
2197 rtx new_i3_dest = SET_DEST (i3set);
2198 rtx new_i2_dest = SET_DEST (i2set);
2199
2200 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2201 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2202 || GET_CODE (new_i3_dest) == SUBREG)
2203 new_i3_dest = XEXP (new_i3_dest, 0);
2204
2205 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2206 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2207 || GET_CODE (new_i2_dest) == SUBREG)
2208 new_i2_dest = XEXP (new_i2_dest, 0);
2209
2210 if (GET_CODE (new_i3_dest) == REG
2211 && GET_CODE (new_i2_dest) == REG
2212 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
b1f21e0a 2213 REG_N_SETS (REGNO (new_i2_dest))++;
393de53f 2214 }
916f14f1 2215 }
2216
2217 /* If we can split it and use I2DEST, go ahead and see if that
2218 helps things be recognized. Verify that none of the registers
2219 are set between I2 and I3. */
d0ab8cd3 2220 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2221#ifdef HAVE_cc0
2222 && GET_CODE (i2dest) == REG
2223#endif
2224 /* We need I2DEST in the proper mode. If it is a hard register
2225 or the only use of a pseudo, we can change its mode. */
2226 && (GET_MODE (*split) == GET_MODE (i2dest)
2227 || GET_MODE (*split) == VOIDmode
2228 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 2229 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
230d793d
RS
2230 && ! REG_USERVAR_P (i2dest)))
2231 && (next_real_insn (i2) == i3
2232 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2233 /* We can't overwrite I2DEST if its value is still used by
2234 NEWPAT. */
2235 && ! reg_referenced_p (i2dest, newpat))
2236 {
2237 rtx newdest = i2dest;
2238 enum rtx_code split_code = GET_CODE (*split);
2239 enum machine_mode split_mode = GET_MODE (*split);
2240
2241 /* Get NEWDEST as a register in the proper mode. We have already
2242 validated that we can do this. */
df7d75de 2243 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
230d793d 2244 {
38a448ca 2245 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2246
2247 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2248 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2249 }
2250
2251 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2252 an ASHIFT. This can occur if it was inside a PLUS and hence
2253 appeared to be a memory address. This is a kludge. */
df7d75de 2254 if (split_code == MULT
2255 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2256 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2257 {
2258 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
2259 XEXP (*split, 0), GEN_INT (i)));
2260 /* Update split_code because we may not have a multiply
2261 anymore. */
2262 split_code = GET_CODE (*split);
2263 }
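	  /* E.g. a hypothetical (mult (reg 65) (const_int 4)) found at the
	     split point becomes (ashift (reg 65) (const_int 2)).  */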
2264
2265#ifdef INSN_SCHEDULING
2266 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2267 be written as a ZERO_EXTEND. */
2268 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2269 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
2270 XEXP (*split, 0)));
2271#endif
2272
2273 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
2274 SUBST (*split, newdest);
8e2f6e35 2275 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2276
2277 /* If the split point was a MULT and we didn't have one before,
2278 don't use one now. */
2279 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
8e2f6e35 2280 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2281 }
2282 }
2283
2284 /* Check for a case where we loaded from memory in a narrow mode and
2285 then sign extended it, but we need both registers. In that case,
2286 we have a PARALLEL with both loads from the same memory location.
2287 We can split this into a load from memory followed by a register-register
2288 copy. This saves at least one insn, more if register allocation can
2289 eliminate the copy.
2290
2291 We cannot do this if the destination of the second assignment is
2292 a register that we have already assumed is zero-extended. Similarly
2293 for a SUBREG of such a register. */
2294
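 /* Illustrative sketch (hypothetical RTL): a PARALLEL such as

	[(set (reg:SI 66) (sign_extend:SI (mem:QI (reg 65))))
	 (set (reg:QI 67) (mem:QI (reg 65)))]

    is rewritten so that the first SET becomes the new I2 and the second
    load is replaced by a copy from the low part of (reg:SI 66).  */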
2295 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2296 && GET_CODE (newpat) == PARALLEL
2297 && XVECLEN (newpat, 0) == 2
2298 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2299 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2300 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2301 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2302 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2303 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2304 INSN_CUID (i2))
2305 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2306 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2307 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2308 (GET_CODE (temp) == REG
2309 && reg_nonzero_bits[REGNO (temp)] != 0
2310 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2311 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2312 && (reg_nonzero_bits[REGNO (temp)]
2313 != GET_MODE_MASK (word_mode))))
2314 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2315 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2316 (GET_CODE (temp) == REG
2317 && reg_nonzero_bits[REGNO (temp)] != 0
2318 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2319 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2320 && (reg_nonzero_bits[REGNO (temp)]
2321 != GET_MODE_MASK (word_mode)))))
2322 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2323 SET_SRC (XVECEXP (newpat, 0, 1)))
2324 && ! find_reg_note (i3, REG_UNUSED,
2325 SET_DEST (XVECEXP (newpat, 0, 0))))
2326 {
2327 rtx ni2dest;
2328
230d793d 2329 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 2330 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
2331 newpat = XVECEXP (newpat, 0, 1);
2332 SUBST (SET_SRC (newpat),
472fbdd1 2333 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
8e2f6e35 2334 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 2335
230d793d 2336 if (i2_code_number >= 0)
8e2f6e35 2337 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2338
2339 if (insn_code_number >= 0)
2340 {
2341 rtx insn;
2342 rtx link;
2343
2344 /* If we will be able to accept this, we have made a change to the
2345 destination of I3. This can invalidate a LOG_LINKS pointing
2346 to I3. No other part of combine.c makes such a transformation.
2347
2348 The new I3 will have a destination that was previously the
2349 destination of I1 or I2 and which was used in i2 or I3. Call
2350 distribute_links to make a LOG_LINK from the next use of
2351 that destination. */
2352
2353 PATTERN (i3) = newpat;
38a448ca 2354 distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
2355
2356 /* I3 now uses what used to be its destination and which is
2357 now I2's destination. That means we need a LOG_LINK from
2358 I3 to I2. But we used to have one, so we still will.
2359
2360 However, some later insn might be using I2's dest and have
2361 a LOG_LINK pointing at I3. We must remove this link.
2362 The simplest way to remove the link is to point it at I1,
2363 which we know will be a NOTE. */
2364
2365 for (insn = NEXT_INSN (i3);
0d4d42c3 2366 insn && (this_basic_block == n_basic_blocks - 1
3b413743 2367 || insn != BLOCK_HEAD (this_basic_block + 1));
2368 insn = NEXT_INSN (insn))
2369 {
2c3c49de 2370 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2371 {
2372 for (link = LOG_LINKS (insn); link;
2373 link = XEXP (link, 1))
2374 if (XEXP (link, 0) == i3)
2375 XEXP (link, 0) = i1;
2376
2377 break;
2378 }
2379 }
2380 }
230d793d 2381 }
663522cb 2382
2383 /* Similarly, check for a case where we have a PARALLEL of two independent
2384 SETs but we started with three insns. In this case, we can do the sets
2385 as two separate insns. This case occurs when some SET allows two
2386 other insns to combine, but the destination of that SET is still live. */
2387
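 /* Sketch (hypothetical RTL): if NEWPAT ended up as

	(parallel [(set (reg:SI 66) (plus:SI (reg:SI 64) (reg:SI 65)))
		   (set (reg:SI 67) (minus:SI (reg:SI 64) (reg:SI 65)))])

    and the two SETs are independent, one SET can become the new I2 and
    the other the new I3.  */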
2388 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2389 && GET_CODE (newpat) == PARALLEL
2390 && XVECLEN (newpat, 0) == 2
2391 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2392 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2393 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2394 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2395 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2396 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2397 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2398 INSN_CUID (i2))
2399 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2400 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2401 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2402 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2403 XVECEXP (newpat, 0, 0))
2404 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2405 XVECEXP (newpat, 0, 1))
2406 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2407 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
230d793d 2408 {
2409 /* Normally, it doesn't matter which of the two is done first,
2410 but it does if one references cc0. In that case, it has to
2411 be first. */
2412#ifdef HAVE_cc0
2413 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2414 {
2415 newi2pat = XVECEXP (newpat, 0, 0);
2416 newpat = XVECEXP (newpat, 0, 1);
2417 }
2418 else
2419#endif
2420 {
2421 newi2pat = XVECEXP (newpat, 0, 1);
2422 newpat = XVECEXP (newpat, 0, 0);
2423 }
230d793d 2424
8e2f6e35 2425 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 2426
230d793d 2427 if (i2_code_number >= 0)
8e2f6e35 2428 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2429 }
2430
2431 /* If it still isn't recognized, fail and change things back the way they
2432 were. */
2433 if ((insn_code_number < 0
2434 /* Is the result a reasonable ASM_OPERANDS? */
2435 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2436 {
2437 undo_all ();
2438 return 0;
2439 }
2440
2441 /* If we had to change another insn, make sure it is valid also. */
2442 if (undobuf.other_insn)
2443 {
2444 rtx other_pat = PATTERN (undobuf.other_insn);
2445 rtx new_other_notes;
2446 rtx note, next;
2447
2448 CLEAR_HARD_REG_SET (newpat_used_regs);
2449
2450 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2451 &new_other_notes);
2452
2453 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2454 {
2455 undo_all ();
2456 return 0;
2457 }
2458
2459 PATTERN (undobuf.other_insn) = other_pat;
2460
2461 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2462 are still valid. Then add any non-duplicate notes added by
2463 recog_for_combine. */
2464 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2465 {
2466 next = XEXP (note, 1);
2467
2468 if (REG_NOTE_KIND (note) == REG_UNUSED
2469 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2470 {
2471 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2472 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2473
2474 remove_note (undobuf.other_insn, note);
2475 }
2476 }
2477
2478 for (note = new_other_notes; note; note = XEXP (note, 1))
2479 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2480 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
1a26b032 2481
230d793d 2482 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 2483 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d 2484 }
5ef17dd2 2485#ifdef HAVE_cc0
663522cb 2486 /* If I2 is the setter of CC0 and I3 is the user of CC0, check whether
2487 they are adjacent to each other or not. */
2488 {
2489 rtx p = prev_nonnote_insn (i3);
2490 if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2491 && sets_cc0_p (newi2pat))
5ef17dd2 2492 {
2493 undo_all ();
2494 return 0;
5ef17dd2 2495 }
2496 }
2497#endif
230d793d 2498
663522cb 2499 /* We now know that we can do this combination. Merge the insns and
2500 update the status of registers and LOG_LINKS. */
2501
2502 {
2503 rtx i3notes, i2notes, i1notes = 0;
2504 rtx i3links, i2links, i1links = 0;
2505 rtx midnotes = 0;
770ae6cc 2506 unsigned int regno;
2507 /* Compute which registers we expect to eliminate. newi2pat may be setting
2508 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2509 same as i3dest, in which case newi2pat may be setting i1dest. */
2510 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2511 || i2dest_in_i2src || i2dest_in_i1src
230d793d 2512 ? 0 : i2dest);
2513 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2514 || (newi2pat && reg_set_p (i1dest, newi2pat))
2515 ? 0 : i1dest);
2516
2517 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2518 clear them. */
2519 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2520 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2521 if (i1)
2522 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2523
2524 /* Ensure that we do not have something that should not be shared but
2525 occurs multiple times in the new insns. Check this by first
5089e22e 2526 resetting all the `used' flags and then copying anything that is shared. */
2527
2528 reset_used_flags (i3notes);
2529 reset_used_flags (i2notes);
2530 reset_used_flags (i1notes);
2531 reset_used_flags (newpat);
2532 reset_used_flags (newi2pat);
2533 if (undobuf.other_insn)
2534 reset_used_flags (PATTERN (undobuf.other_insn));
2535
2536 i3notes = copy_rtx_if_shared (i3notes);
2537 i2notes = copy_rtx_if_shared (i2notes);
2538 i1notes = copy_rtx_if_shared (i1notes);
2539 newpat = copy_rtx_if_shared (newpat);
2540 newi2pat = copy_rtx_if_shared (newi2pat);
2541 if (undobuf.other_insn)
2542 reset_used_flags (PATTERN (undobuf.other_insn));
2543
2544 INSN_CODE (i3) = insn_code_number;
2545 PATTERN (i3) = newpat;
2546 if (undobuf.other_insn)
2547 INSN_CODE (undobuf.other_insn) = other_code_number;
2548
2549 /* We had one special case above where I2 had more than one set and
2550 we replaced a destination of one of those sets with the destination
2551 of I3. In that case, we have to update LOG_LINKS of insns later
2552 in this basic block. Note that this (expensive) case is rare.
2553
2554 Also, in this case, we must pretend that all REG_NOTEs for I2
2555 actually came from I3, so that REG_UNUSED notes from I2 will be
2556 properly handled. */
2557
c7be4f66 2558 if (i3_subst_into_i2)
176c9e6b 2559 {
1786009e 2560 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2561 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2562 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2563 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2564 && ! find_reg_note (i2, REG_UNUSED,
2565 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2566 for (temp = NEXT_INSN (i2);
2567 temp && (this_basic_block == n_basic_blocks - 1
2568 || BLOCK_HEAD (this_basic_block) != temp);
2569 temp = NEXT_INSN (temp))
2570 if (temp != i3 && INSN_P (temp))
2571 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2572 if (XEXP (link, 0) == i2)
2573 XEXP (link, 0) = i3;
2574
2575 if (i3notes)
2576 {
2577 rtx link = i3notes;
2578 while (XEXP (link, 1))
2579 link = XEXP (link, 1);
2580 XEXP (link, 1) = i2notes;
2581 }
2582 else
2583 i3notes = i2notes;
2584 i2notes = 0;
2585 }
2586
2587 LOG_LINKS (i3) = 0;
2588 REG_NOTES (i3) = 0;
2589 LOG_LINKS (i2) = 0;
2590 REG_NOTES (i2) = 0;
2591
2592 if (newi2pat)
2593 {
2594 INSN_CODE (i2) = i2_code_number;
2595 PATTERN (i2) = newi2pat;
2596 }
2597 else
2598 {
2599 PUT_CODE (i2, NOTE);
2600 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2601 NOTE_SOURCE_FILE (i2) = 0;
2602 }
2603
2604 if (i1)
2605 {
2606 LOG_LINKS (i1) = 0;
2607 REG_NOTES (i1) = 0;
2608 PUT_CODE (i1, NOTE);
2609 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2610 NOTE_SOURCE_FILE (i1) = 0;
2611 }
2612
2613 /* Get death notes for everything that is now used in either I3 or
663522cb 2614 I2 and used to die in a previous insn. If we built two new
2615 patterns, move from I1 to I2 then I2 to I3 so that we get the
2616 proper movement on registers that I2 modifies. */
230d793d 2617
230d793d 2618 if (newi2pat)
2619 {
2620 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2621 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2622 }
2623 else
2624 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2625 i3, &midnotes);
2626
2627 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2628 if (i3notes)
2629 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2630 elim_i2, elim_i1);
230d793d 2631 if (i2notes)
2632 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2633 elim_i2, elim_i1);
230d793d 2634 if (i1notes)
2635 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2636 elim_i2, elim_i1);
230d793d 2637 if (midnotes)
2638 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2639 elim_i2, elim_i1);
2640
2641 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2642 know these are REG_UNUSED and want them to go to the desired insn,
663522cb 2643 so we always pass it as i3. We have not counted the notes in
2644 reg_n_deaths yet, so we need to do so now. */
2645
230d793d 2646 if (newi2pat && new_i2_notes)
2647 {
2648 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2649 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2650 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
663522cb 2651
2652 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2653 }
2654
230d793d 2655 if (new_i3_notes)
2656 {
2657 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2658 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2659 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
663522cb 2660
2661 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2662 }
2663
2664 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2665 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2666 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2667 in that case, it might delete I2. Similarly for I2 and I1.
2668 Show an additional death due to the REG_DEAD note we make here. If
2669 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 2670
230d793d 2671 if (i3dest_killed)
2672 {
2673 if (GET_CODE (i3dest_killed) == REG)
b1f21e0a 2674 REG_N_DEATHS (REGNO (i3dest_killed))++;
1a26b032 2675
e9a25f70 2676 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2677 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2678 NULL_RTX),
ff3467a9 2679 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
e9a25f70 2680 else
2681 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2682 NULL_RTX),
e9a25f70 2683 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
ff3467a9 2684 elim_i2, elim_i1);
1a26b032 2685 }
58c8c593 2686
230d793d 2687 if (i2dest_in_i2src)
58c8c593 2688 {
1a26b032 2689 if (GET_CODE (i2dest) == REG)
b1f21e0a 2690 REG_N_DEATHS (REGNO (i2dest))++;
1a26b032 2691
58c8c593 2692 if (newi2pat && reg_set_p (i2dest, newi2pat))
38a448ca 2693 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2694 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2695 else
38a448ca 2696 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2697 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2698 NULL_RTX, NULL_RTX);
2699 }
2700
230d793d 2701 if (i1dest_in_i1src)
58c8c593 2702 {
1a26b032 2703 if (GET_CODE (i1dest) == REG)
b1f21e0a 2704 REG_N_DEATHS (REGNO (i1dest))++;
1a26b032 2705
58c8c593 2706 if (newi2pat && reg_set_p (i1dest, newi2pat))
38a448ca 2707 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2708 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2709 else
38a448ca 2710 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2711 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2712 NULL_RTX, NULL_RTX);
2713 }
2714
2715 distribute_links (i3links);
2716 distribute_links (i2links);
2717 distribute_links (i1links);
2718
2719 if (GET_CODE (i2dest) == REG)
2720 {
2721 rtx link;
2722 rtx i2_insn = 0, i2_val = 0, set;
2723
2724 /* The insn that used to set this register doesn't exist, and
2725 this life of the register may not exist either. See if one of
663522cb 2726 I3's links points to an insn that sets I2DEST. If it does,
2727 that is now the last known value for I2DEST. If we don't update
2728 this and I2 set the register to a value that depended on its old
2729 contents, we will get confused. If this insn is used, things
2730 will be set correctly in combine_instructions. */
2731
2732 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2733 if ((set = single_set (XEXP (link, 0))) != 0
2734 && rtx_equal_p (i2dest, SET_DEST (set)))
2735 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2736
2737 record_value_for_reg (i2dest, i2_insn, i2_val);
2738
2739 /* If the reg formerly set in I2 died only once and that was in I3,
2740 zero its use count so it won't make `reload' do any work. */
2741 if (! added_sets_2
2742 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2743 && ! i2dest_in_i2src)
2744 {
2745 regno = REGNO (i2dest);
b1f21e0a 2746 REG_N_SETS (regno)--;
2747 }
2748 }
2749
2750 if (i1 && GET_CODE (i1dest) == REG)
2751 {
2752 rtx link;
2753 rtx i1_insn = 0, i1_val = 0, set;
2754
2755 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2756 if ((set = single_set (XEXP (link, 0))) != 0
2757 && rtx_equal_p (i1dest, SET_DEST (set)))
2758 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2759
2760 record_value_for_reg (i1dest, i1_insn, i1_val);
2761
230d793d 2762 regno = REGNO (i1dest);
5af91171 2763 if (! added_sets_1 && ! i1dest_in_i1src)
770ae6cc 2764 REG_N_SETS (regno)--;
2765 }
2766
951553af 2767 /* Update reg_nonzero_bits et al for any changes that may have been made
663522cb 2768 to this insn. The order of set_nonzero_bits_and_sign_copies() is
5fb7c247 2769 important, because newi2pat can affect the nonzero_bits of newpat. */
22609cbf 2770 if (newi2pat)
84832317 2771 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
5fb7c247 2772 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
22609cbf 2773
2774 /* Set new_direct_jump_p if a new return or simple jump instruction
2775 has been created.
2776
663522cb 2777 If I3 is now an unconditional jump, ensure that it has a
230d793d 2778 BARRIER following it since it may have initially been a
381ee8af 2779 conditional jump. It may also be the last nonnote insn. */
663522cb 2780
7f1c097d 2781 if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3))
2782 {
2783 *new_direct_jump_p = 1;
230d793d 2784
2785 if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2786 || GET_CODE (temp) != BARRIER)
2787 emit_barrier_after (i3);
2788 }
2789 }
2790
2791 combine_successes++;
e7749837 2792 undo_commit ();
230d793d 2793
2794 /* Clear this here, so that subsequent get_last_value calls are not
2795 affected. */
2796 subst_prev_insn = NULL_RTX;
2797
2798 if (added_links_insn
2799 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2800 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2801 return added_links_insn;
2802 else
2803 return newi2pat ? i2 : i3;
2804}
2805\f
2806/* Undo all the modifications recorded in undobuf. */
2807
2808static void
2809undo_all ()
2810{
2811 struct undo *undo, *next;
2812
2813 for (undo = undobuf.undos; undo; undo = next)
7c046e4e 2814 {
2815 next = undo->next;
2816 if (undo->is_int)
2817 *undo->where.i = undo->old_contents.i;
7c046e4e 2818 else
241cea85
RK
2819 *undo->where.r = undo->old_contents.r;
2820
2821 undo->next = undobuf.frees;
2822 undobuf.frees = undo;
7c046e4e 2823 }
230d793d 2824
845fc875 2825 undobuf.undos = undobuf.previous_undos = 0;
2826
2827 /* Clear this here, so that subsequent get_last_value calls are not
2828 affected. */
2829 subst_prev_insn = NULL_RTX;
230d793d 2830}
2831
2832/* We've committed to accepting the changes we made. Move all
2833 of the undos to the free list. */
2834
2835static void
2836undo_commit ()
2837{
2838 struct undo *undo, *next;
2839
2840 for (undo = undobuf.undos; undo; undo = next)
2841 {
2842 next = undo->next;
2843 undo->next = undobuf.frees;
2844 undobuf.frees = undo;
2845 }
2846 undobuf.undos = undobuf.previous_undos = 0;
2847}
2848
2849\f
2850/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2851 where we have an arithmetic expression and return that point. LOC will
2852 be inside INSN.
2853
2854 try_combine will call this function to see if an insn can be split into
2855 two insns. */
2856
2857static rtx *
d0ab8cd3 2858find_split_point (loc, insn)
230d793d 2859 rtx *loc;
d0ab8cd3 2860 rtx insn;
2861{
2862 rtx x = *loc;
2863 enum rtx_code code = GET_CODE (x);
2864 rtx *split;
2865 unsigned HOST_WIDE_INT len = 0;
2866 HOST_WIDE_INT pos = 0;
2867 int unsignedp = 0;
6a651371 2868 rtx inner = NULL_RTX;
2869
2870 /* First special-case some codes. */
2871 switch (code)
2872 {
2873 case SUBREG:
2874#ifdef INSN_SCHEDULING
2875 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2876 point. */
2877 if (GET_CODE (SUBREG_REG (x)) == MEM)
2878 return loc;
2879#endif
d0ab8cd3 2880 return find_split_point (&SUBREG_REG (x), insn);
230d793d 2881
230d793d 2882 case MEM:
916f14f1 2883#ifdef HAVE_lo_sum
2884 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2885 using LO_SUM and HIGH. */
2886 if (GET_CODE (XEXP (x, 0)) == CONST
2887 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2888 {
2889 SUBST (XEXP (x, 0),
2890 gen_rtx_combine (LO_SUM, Pmode,
2891 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2892 XEXP (x, 0)));
2893 return &XEXP (XEXP (x, 0), 0);
2894 }
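	/* E.g. a hypothetical (mem (symbol_ref "x")) is rewritten above as
	   (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x"))), and the
	   HIGH term becomes the split point.  */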
2895#endif
2896
2897 /* If we have a PLUS whose second operand is a constant and the
2898 address is not valid, perhaps we can split it up using
2899 the machine-specific way to split large constants. We use
ddd5a7c1 2900 the first pseudo-reg (one of the virtual regs) as a placeholder;
916f14f1
RK
2901 it will not remain in the result. */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        {
          rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
          rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
                                 subst_insn);

          /* This should have produced two insns, each of which sets our
             placeholder.  If the source of the second is a valid address,
             we can put both sources together and make a split point
             in the middle.  */

          if (seq && XVECLEN (seq, 0) == 2
              && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
              && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
              && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
              && ! reg_mentioned_p (reg,
                                    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
              && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
              && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
              && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
              && memory_address_p (GET_MODE (x),
                                   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
            {
              rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
              rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));

              /* Replace the placeholder in SRC2 with SRC1.  If we can
                 find where in SRC2 it was placed, that can become our
                 split point and we can replace this address with SRC2.
                 Just try two obvious places.  */

              src2 = replace_rtx (src2, reg, src1);
              split = 0;
              if (XEXP (src2, 0) == src1)
                split = &XEXP (src2, 0);
              else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
                       && XEXP (XEXP (src2, 0), 0) == src1)
                split = &XEXP (XEXP (src2, 0), 0);

              if (split)
                {
                  SUBST (XEXP (x, 0), src2);
                  return split;
                }
            }

          /* If that didn't work, perhaps the first operand is complex and
             needs to be computed separately, so make a split point there.
             This will occur on machines that just support REG + CONST
             and have a constant moved through some previous computation.  */

          else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
                   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
                         && (GET_RTX_CLASS (GET_CODE (SUBREG_REG
                                            (XEXP (XEXP (x, 0), 0))))
                             == 'o')))
            return &XEXP (XEXP (x, 0), 0);
        }
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
         ZERO_EXTRACT, the most likely reason why this doesn't match is that
         we need to put the operand into a register.  So split at that
         point.  */

      if (SET_DEST (x) == cc0_rtx
          && GET_CODE (SET_SRC (x)) != COMPARE
          && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
          && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
          && ! (GET_CODE (SET_SRC (x)) == SUBREG
                && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
        return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
        return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
        return split;

      /* See if this is a bitfield assignment with everything constant.  If
         so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
          && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
          && GET_CODE (SET_SRC (x)) == CONST_INT
          && ((INTVAL (XEXP (SET_DEST (x), 1))
               + INTVAL (XEXP (SET_DEST (x), 2)))
              <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
          && ! side_effects_p (XEXP (SET_DEST (x), 0)))
        {
          HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
          unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
          unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
          rtx dest = XEXP (SET_DEST (x), 0);
          enum machine_mode mode = GET_MODE (dest);
          unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

          if (BITS_BIG_ENDIAN)
            pos = GET_MODE_BITSIZE (mode) - len - pos;

          if (src == mask)
            SUBST (SET_SRC (x),
                   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
          else
            SUBST (SET_SRC (x),
                   gen_binary (IOR, mode,
                               gen_binary (AND, mode, dest,
                                           GEN_INT (~(mask << pos)
                                                    & GET_MODE_MASK (mode))),
                               GEN_INT (src << pos)));

          SUBST (SET_DEST (x), dest);

          split = find_split_point (&SET_SRC (x), insn);
          if (split && split != &SET_SRC (x))
            return split;
        }
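      /* Concretely, with len == 4, pos == 8, and src == 3 (and
         ! BITS_BIG_ENDIAN), the store
         (set (zero_extract DEST 4 8) (const_int 3)) becomes
         (set DEST (ior (and DEST (const_int ~(0xf << 8)))
                        (const_int (3 << 8)))):
         the AND clears the field and the IOR deposits the new bits.  */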

      /* Otherwise, see if this is an operation that we can split into two.
         If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
        {
        case AND:
          /* If we are AND'ing with a large constant that is only a single
             bit and the result is only being used in a context where we
             need to know if it is zero or non-zero, replace it with a bit
             extraction.  This will avoid the large constant, which might
             have taken more than one insn to make.  If the constant were
             not a valid argument to the AND but took only one insn to make,
             this is no worse, but if it took more than one insn, it will
             be better.  */

          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
              && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
              && GET_CODE (SET_DEST (x)) == REG
              && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
              && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
              && XEXP (*split, 0) == SET_DEST (x)
              && XEXP (*split, 1) == const0_rtx)
            {
              rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
                                                XEXP (SET_SRC (x), 0),
                                                pos, NULL_RTX, 1, 1, 0, 0);
              if (extraction != 0)
                {
                  SUBST (SET_SRC (x), extraction);
                  return find_split_point (loc, insn);
                }
            }
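          /* E.g. (set R (and X (const_int 0x8000))), with R used only in
             (ne R (const_int 0)), becomes a one-bit zero_extract of X at
             bit 15 (pos == exact_log2 (0x8000)), sidestepping the large
             constant entirely.  */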
          break;

        case NE:
          /* If STORE_FLAG_VALUE is -1 and this is (ne X 0) where only one
             bit of X can possibly be nonzero, this can be converted into a
             NEG of a shift.  */
          if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
              && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
              && 1 <= (pos = exact_log2
                       (nonzero_bits (XEXP (SET_SRC (x), 0),
                                      GET_MODE (XEXP (SET_SRC (x), 0))))))
            {
              enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

              SUBST (SET_SRC (x),
                     gen_rtx_combine (NEG, mode,
                                      gen_rtx_combine (LSHIFTRT, mode,
                                                       XEXP (SET_SRC (x), 0),
                                                       GEN_INT (pos))));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
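          /* For example, if only bit 3 of X can be nonzero, then
             (lshiftrt X 3) is 0 or 1 and (neg (lshiftrt X 3)) is 0 or -1,
             matching (ne X 0) under STORE_FLAG_VALUE == -1.  */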
          break;

        case SIGN_EXTEND:
          inner = XEXP (SET_SRC (x), 0);

          /* We can't optimize if either mode is a partial integer
             mode as we don't know how many bits are significant
             in those modes.  */
          if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
              || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
            break;

          pos = 0;
          len = GET_MODE_BITSIZE (GET_MODE (inner));
          unsignedp = 0;
          break;

        case SIGN_EXTRACT:
        case ZERO_EXTRACT:
          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
            {
              inner = XEXP (SET_SRC (x), 0);
              len = INTVAL (XEXP (SET_SRC (x), 1));
              pos = INTVAL (XEXP (SET_SRC (x), 2));

              if (BITS_BIG_ENDIAN)
                pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
              unsignedp = (code == ZERO_EXTRACT);
            }
          break;

        default:
          break;
        }

      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
        {
          enum machine_mode mode = GET_MODE (SET_SRC (x));

          /* For unsigned, we have a choice of a shift followed by an
             AND or two shifts.  Use two shifts for field sizes where the
             constant might be too large.  We assume here that we can
             always at least get 8-bit constants in an AND insn, which is
             true for every current RISC.  */

          if (unsignedp && len <= 8)
            {
              SUBST (SET_SRC (x),
                     gen_rtx_combine
                     (AND, mode,
                      gen_rtx_combine (LSHIFTRT, mode,
                                       gen_lowpart_for_combine (mode, inner),
                                       GEN_INT (pos)),
                      GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
          else
            {
              SUBST (SET_SRC (x),
                     gen_rtx_combine
                     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
                      gen_rtx_combine (ASHIFT, mode,
                                       gen_lowpart_for_combine (mode, inner),
                                       GEN_INT (GET_MODE_BITSIZE (mode)
                                                - len - pos)),
                      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
        }
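      /* Numeric check, for a 32-bit MODE: (zero_extract X 8 4) becomes
         (and (lshiftrt X 4) 0xff), while a wider or signed field becomes
         (ashift X (32 - len - pos)) followed by a right shift of
         (32 - len), moving the field to the top and back down so the
         upper bits are filled appropriately.  */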

      /* See if this is a simple operation with a constant as the second
         operand.  It might be that this constant is out of range and hence
         could be used as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
           || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
           || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
          && CONSTANT_P (XEXP (SET_SRC (x), 1))
          && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
              || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
                  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG
                                     (XEXP (SET_SRC (x), 0))))
                      == 'o'))))
        return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
         not in a register.  The operation might require this operand in a
         register, so return it as a split point.  We can always do this
         because if the first operand were another operation, we would have
         already found it as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
           || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
           || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
           || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
          && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
        return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
         it is better to write this as (not (ior A B)) so we can split it.
         Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
        {
          SUBST (*loc,
                 gen_rtx_combine (NOT, GET_MODE (x),
                                  gen_rtx_combine (code == IOR ? AND : IOR,
                                                   GET_MODE (x),
                                                   XEXP (XEXP (x, 0), 0),
                                                   XEXP (XEXP (x, 1), 0))));
          return find_split_point (loc, insn);
        }

      /* Many RISC machines have a large set of logical insns.  If the
         second operand is a NOT, put it first so we will try to split the
         other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
        {
          rtx tem = XEXP (x, 0);
          SUBST (XEXP (x, 0), XEXP (x, 1));
          SUBST (XEXP (x, 1), tem);
        }
      break;

    default:
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case 'b':                   /* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case '3':
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
        return split;
      /* ... fall through ...  */
    case '2':
    case 'c':
    case '<':
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
        return split;
      /* ... fall through ...  */
    case '1':
      /* Some machines have (and (shift ...) ...) insns.  If X is not
         an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
        return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
        return split;
      return loc;
    }

  /* Otherwise, we don't have a split point.  */
  return 0;
}

/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that
   the caller can tell whether the result is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is non-zero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is non-zero.  */

static rtx
subst (x, from, to, in_dest, unique_copy)
     register rtx x, from, to;
     int in_dest;
     int unique_copy;
{
  register enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  register const char *fmt;
  register int len, i;
  rtx new;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)                        \
  ((X) == (Y)                                           \
   || (GET_CODE (X) == REG && GET_CODE (Y) == REG       \
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they will
     not have been seen as equal above.  However, flow.c will make a
     LOG_LINKS entry for that case.  If we do nothing, we will try to
     rerecognize our original insn and, when it succeeds, we will
     delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && GET_CODE (from) == REG
      && REGNO (x) == REGNO (from))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

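  /* Concretely, if FROM is (reg 65) and TO is (plus (reg 65) (const_int 4)),
     scanning inside an already-installed copy of TO would find (reg 65)
     again and splice TO into itself; returning TO unchanged when we reach
     it breaks that cycle.  */
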
  /* Parallel asm_operands need special attention because all of the
     inputs are shared across the arms.  Furthermore, unsharing the
     rtl results in recognition failures.  Failure to handle this case
     specially can result in circular rtl.

     Solve this by doing a normal pass across the first entry of the
     parallel, and only processing the SET_DESTs of the subsequent
     entries.  Ug.  */

  if (code == PARALLEL
      && GET_CODE (XVECEXP (x, 0, 0)) == SET
      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
    {
      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);

      /* If this substitution failed, this whole thing fails.  */
      if (GET_CODE (new) == CLOBBER
          && XEXP (new, 0) == const0_rtx)
        return new;

      SUBST (XVECEXP (x, 0, 0), new);

      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
        {
          rtx dest = SET_DEST (XVECEXP (x, 0, i));

          if (GET_CODE (dest) != REG
              && GET_CODE (dest) != CC0
              && GET_CODE (dest) != PC)
            {
              new = subst (dest, from, to, 0, unique_copy);

              /* If this substitution failed, this whole thing fails.  */
              if (GET_CODE (new) == CLOBBER
                  && XEXP (new, 0) == const0_rtx)
                return new;

              SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
            }
        }
    }
  else
    {
      len = GET_RTX_LENGTH (code);
      fmt = GET_RTX_FORMAT (code);

      /* We don't need to process a SET_DEST that is a register, CC0,
         or PC, so set up to skip this common case.  All other cases
         where we want to suppress replacing something inside a
         SET_SRC are handled via the IN_DEST operand.  */
      if (code == SET
          && (GET_CODE (SET_DEST (x)) == REG
              || GET_CODE (SET_DEST (x)) == CC0
              || GET_CODE (SET_DEST (x)) == PC))
        fmt = "ie";

      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
         constant.  */
      if (fmt[0] == 'e')
        op0_mode = GET_MODE (XEXP (x, 0));

      for (i = 0; i < len; i++)
        {
          if (fmt[i] == 'E')
            {
              register int j;
              for (j = XVECLEN (x, i) - 1; j >= 0; j--)
                {
                  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
                    {
                      new = (unique_copy && n_occurrences
                             ? copy_rtx (to) : to);
                      n_occurrences++;
                    }
                  else
                    {
                      new = subst (XVECEXP (x, i, j), from, to, 0,
                                   unique_copy);

                      /* If this substitution failed, this whole thing
                         fails.  */
                      if (GET_CODE (new) == CLOBBER
                          && XEXP (new, 0) == const0_rtx)
                        return new;
                    }

                  SUBST (XVECEXP (x, i, j), new);
                }
            }
          else if (fmt[i] == 'e')
            {
              if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
                {
                  /* In general, don't install a subreg involving two
                     modes not tieable.  It can worsen register
                     allocation, and can even make invalid reload
                     insns, since the reg inside may need to be copied
                     from in the outside mode, and that may be invalid
                     if it is an fp reg copied in integer mode.

                     We allow two exceptions to this: It is valid if
                     it is inside another SUBREG and the mode of that
                     SUBREG and the mode of the inside of TO is
                     tieable and it is valid if X is a SET that copies
                     FROM to CC0.  */

                  if (GET_CODE (to) == SUBREG
                      && ! MODES_TIEABLE_P (GET_MODE (to),
                                            GET_MODE (SUBREG_REG (to)))
                      && ! (code == SUBREG
                            && MODES_TIEABLE_P (GET_MODE (x),
                                                GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
                      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
                      )
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);

#ifdef CLASS_CANNOT_CHANGE_MODE
                  if (code == SUBREG
                      && GET_CODE (to) == REG
                      && REGNO (to) < FIRST_PSEUDO_REGISTER
                      && (TEST_HARD_REG_BIT
                          (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
                           REGNO (to)))
                      && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (to),
                                                     GET_MODE (x)))
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
#endif

                  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
                  n_occurrences++;
                }
              else
                /* If we are in a SET_DEST, suppress most cases unless we
                   have gone inside a MEM, in which case we want to
                   simplify the address.  We assume here that things that
                   are actually part of the destination have their inner
                   parts in the first expression.  This is true for SUBREG,
                   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
                   things aside from REG and MEM that should appear in a
                   SET_DEST.  */
                new = subst (XEXP (x, i), from, to,
                             (((in_dest
                                && (code == SUBREG || code == STRICT_LOW_PART
                                    || code == ZERO_EXTRACT))
                               || code == SET)
                              && i == 0), unique_copy);

              /* If we found that we will have to reject this combination,
                 indicate that by returning the CLOBBER ourselves, rather than
                 an expression containing it.  This will speed things up as
                 well as prevent accidents where two CLOBBERs are considered
                 to be equal, thus producing an incorrect simplification.  */

              if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
                return new;

              SUBST (XEXP (x, i), new);
            }
        }
    }

  /* Try to simplify X.  If the simplification changed the code, it is likely
     that further simplification will help, so loop, but limit the number
     of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
         with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
        x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);

      if (GET_CODE (x) == code)
        break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
         have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}

/* Simplify X, a piece of RTL.  We just operate on the expression at the
   outer level; call `subst' to simplify recursively.  Return the new
   expression.

   OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
   will be the last iteration even if an expression with a code different
   from X is returned; IN_DEST is nonzero if we are inside a SET_DEST.  */

static rtx
combine_simplify_rtx (x, op0_mode, last, in_dest)
     rtx x;
     enum machine_mode op0_mode;
     int last;
     int in_dest;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx temp;
  rtx reversed;
  int i;

  /* If this is a commutative operation, put a constant last and a complex
     expression first.  We don't need to do this for comparisons here.  */
  if (GET_RTX_CLASS (code) == 'c'
      && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
          || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
              && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
          || (GET_CODE (XEXP (x, 0)) == SUBREG
              && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
              && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
    {
      temp = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), temp);
    }
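  /* E.g. (plus (const_int 4) (reg 65)) is rewritten here as
     (plus (reg 65) (const_int 4)), the canonical operand order expected
     by the code below and by most insn patterns.  */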

  /* If this is a PLUS, MINUS, or MULT, and the first operand is the
     sign extension of a PLUS with a constant, reverse the order of the sign
     extension and the addition.  Note that this is not the same as the
     original code, but overflow is undefined for signed values.  Also note
     that the PLUS will have been partially moved "inside" the
     sign-extension, so that the first operand of X will really look like:
         (ashiftrt (plus (ashift A C4) C5) C4).
     We convert this to
         (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
     and replace the first operand of X with that expression.  Later parts
     of this function may simplify the expression further.

     For example, if we start with (mult (sign_extend (plus A C1)) C2),
     we swap the SIGN_EXTEND and PLUS.  Later code will apply the
     distributive law to produce (plus (mult (sign_extend A) C2) C3).

     We do this to simplify address expressions.  */

  if ((code == PLUS || code == MINUS || code == MULT)
      && GET_CODE (XEXP (x, 0)) == ASHIFTRT
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
      && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
      && (temp = simplify_binary_operation (ASHIFTRT, mode,
                                            XEXP (XEXP (XEXP (x, 0), 0), 1),
                                            XEXP (XEXP (x, 0), 1))) != 0)
    {
      rtx new
        = simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
                                INTVAL (XEXP (XEXP (x, 0), 1)));

      new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
                                  INTVAL (XEXP (XEXP (x, 0), 1)));

      SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
    }

  /* If this is a simple operation applied to an IF_THEN_ELSE, try
     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
     things.  Check for cases where both arms are testing the same
     condition.

     Don't do anything if all operands are very simple.  */

  if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
        || GET_RTX_CLASS (code) == '<')
       && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
            && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
                      == 'o')))
           || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
               && ! (GET_CODE (XEXP (x, 1)) == SUBREG
                     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
                         == 'o')))))
      || (GET_RTX_CLASS (code) == '1'
          && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
               && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
                         == 'o'))))))
    {
      rtx cond, true, false;

      cond = if_then_else_cond (x, &true, &false);
      if (cond != 0
          /* If everything is a comparison, what we have is highly unlikely
             to be simpler, so don't use it.  */
          && ! (GET_RTX_CLASS (code) == '<'
                && (GET_RTX_CLASS (GET_CODE (true)) == '<'
                    || GET_RTX_CLASS (GET_CODE (false)) == '<')))
        {
          rtx cop1 = const0_rtx;
          enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);

          if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
            return x;

          /* Simplify the alternative arms; this may collapse the true and
             false arms to store-flag values.  */
          true = subst (true, pc_rtx, pc_rtx, 0, 0);
          false = subst (false, pc_rtx, pc_rtx, 0, 0);

          /* If true and false are not general_operands, an if_then_else
             is unlikely to be simpler.  */
          if (general_operand (true, VOIDmode)
              && general_operand (false, VOIDmode))
            {
              /* Restarting if we generate a store-flag expression will cause
                 us to loop.  Just drop through in this case.  */

              /* If the result values are STORE_FLAG_VALUE and zero, we can
                 just make the comparison operation.  */
              if (true == const_true_rtx && false == const0_rtx)
                x = gen_binary (cond_code, mode, cond, cop1);
              else if (true == const0_rtx && false == const_true_rtx)
                x = gen_binary (reverse_condition (cond_code),
                                mode, cond, cop1);

              /* Likewise, we can make the negate of a comparison operation
                 if the result values are - STORE_FLAG_VALUE and zero.  */
              else if (GET_CODE (true) == CONST_INT
                       && INTVAL (true) == - STORE_FLAG_VALUE
                       && false == const0_rtx)
                x = gen_unary (NEG, mode, mode,
                               gen_binary (cond_code, mode, cond, cop1));
              else if (GET_CODE (false) == CONST_INT
                       && INTVAL (false) == - STORE_FLAG_VALUE
                       && true == const0_rtx)
                x = gen_unary (NEG, mode, mode,
                               gen_binary (reverse_condition (cond_code),
                                           mode, cond, cop1));
              else
                return gen_rtx_IF_THEN_ELSE (mode,
                                             gen_binary (cond_code, VOIDmode,
                                                         cond, cop1),
                                             true, false);

              code = GET_CODE (x);
              op0_mode = VOIDmode;
            }
        }
    }

  /* Try to fold this expression in case we have constants that weren't
     present before.  */
  temp = 0;
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
      break;
    case '<':
      {
        enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
        if (cmp_mode == VOIDmode)
          {
            cmp_mode = GET_MODE (XEXP (x, 1));
            if (cmp_mode == VOIDmode)
              cmp_mode = op0_mode;
          }
        temp = simplify_relational_operation (code, cmp_mode,
                                              XEXP (x, 0), XEXP (x, 1));
      }
#ifdef FLOAT_STORE_FLAG_VALUE
      if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
        {
          if (temp == const0_rtx)
            temp = CONST0_RTX (mode);
          else
            temp = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE (mode), mode);
        }
#endif
      break;
    case 'c':
    case '2':
      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
      break;
    case 'b':
    case '3':
      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
                                         XEXP (x, 1), XEXP (x, 2));
      break;
    }

  if (temp)
    x = temp, code = GET_CODE (temp);

  /* First see if we can apply the inverse distributive law.  */
  if (code == PLUS || code == MINUS
      || code == AND || code == IOR || code == XOR)
    {
      x = apply_distributive_law (x);
      code = GET_CODE (x);
    }

  /* If CODE is an associative operation not otherwise handled, see if we
     can associate some operands.  This can win if they are constants or
     if they are logically related (i.e. (a & b) & a).  */
  if ((code == PLUS || code == MINUS
       || code == MULT || code == AND || code == IOR || code == XOR
       || code == DIV || code == UDIV
       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
      && INTEGRAL_MODE_P (mode))
    {
      if (GET_CODE (XEXP (x, 0)) == code)
        {
          rtx other = XEXP (XEXP (x, 0), 0);
          rtx inner_op0 = XEXP (XEXP (x, 0), 1);
          rtx inner_op1 = XEXP (x, 1);
          rtx inner;

          /* Make sure we pass the constant operand if any as the second
             one if this is a commutative operation.  */
          if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
            {
              rtx tem = inner_op0;
              inner_op0 = inner_op1;
              inner_op1 = tem;
            }
          inner = simplify_binary_operation (code == MINUS ? PLUS
                                             : code == DIV ? MULT
                                             : code == UDIV ? MULT
                                             : code,
                                             mode, inner_op0, inner_op1);

          /* For commutative operations, try the other pair if that one
             didn't simplify.  */
          if (inner == 0 && GET_RTX_CLASS (code) == 'c')
            {
              other = XEXP (XEXP (x, 0), 1);
              inner = simplify_binary_operation (code, mode,
                                                 XEXP (XEXP (x, 0), 0),
                                                 XEXP (x, 1));
            }

          if (inner)
            return gen_binary (code, mode, other, inner);
        }
    }
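  /* For instance, (plus (plus X (const_int 3)) (const_int 4)) folds the
     constants to give (plus X (const_int 7)); and in ((a & b) & a) the
     commutative retry simplifies the inner (and a a) to a, collapsing
     the expression.  */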

  /* A little bit of algebraic simplification here.  */
  switch (code)
    {
    case MEM:
      /* Ensure that our address has any ASHIFTs converted to MULT in case
         address-recognizing predicates are called later.  */
      temp = make_compound_operation (XEXP (x, 0), MEM);
      SUBST (XEXP (x, 0), temp);
      break;

    case SUBREG:
      /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
         is paradoxical.  If we can't do that safely, then it becomes
         something nonsensical so that this combination won't take place.  */

      if (GET_CODE (SUBREG_REG (x)) == MEM
          && (GET_MODE_SIZE (mode)
              <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
        {
          rtx inner = SUBREG_REG (x);
          int endian_offset = 0;
          /* Don't change the mode of the MEM
             if that would change the meaning of the address.  */
          if (MEM_VOLATILE_P (SUBREG_REG (x))
              || mode_dependent_address_p (XEXP (inner, 0)))
            return gen_rtx_CLOBBER (mode, const0_rtx);

          if (BYTES_BIG_ENDIAN)
            {
              if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
                endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
              if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
                endian_offset -= (UNITS_PER_WORD
                                  - GET_MODE_SIZE (GET_MODE (inner)));
            }
          /* Note if the plus_constant doesn't make a valid address
             then this combination won't be accepted.  */
          x = gen_rtx_MEM (mode,
                           plus_constant (XEXP (inner, 0),
                                          (SUBREG_WORD (x) * UNITS_PER_WORD
                                           + endian_offset)));
          MEM_COPY_ATTRIBUTES (x, inner);
          return x;
        }

      /* If we are in a SET_DEST, these other cases can't apply.  */
      if (in_dest)
        return x;

      /* Changing mode twice with SUBREG => just change it once,
         or not at all if changing back to starting mode.  */
      if (GET_CODE (SUBREG_REG (x)) == SUBREG)
        {
          if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
              && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
            return SUBREG_REG (SUBREG_REG (x));

          SUBST_INT (SUBREG_WORD (x),
                     SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
          SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
        }
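      /* E.g. (subreg:QI (subreg:HI (reg:SI 65) 0) 0) becomes
         (subreg:QI (reg:SI 65) 0), and
         (subreg:SI (subreg:SI (reg:SI 65) 0) 0) collapses all the way
         back to (reg:SI 65).  */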

      /* SUBREG of a hard register => just change the register number
         and/or mode.  If the hard register is not valid in that mode,
         suppress this combination.  If the hard register is the stack,
         frame, or argument pointer, leave this as a SUBREG.  */

      if (GET_CODE (SUBREG_REG (x)) == REG
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
          && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
#endif
          && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
        {
          if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
                                  mode))
            return gen_rtx_REG (mode,
                                REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
          else
            return gen_rtx_CLOBBER (mode, const0_rtx);
        }

      /* For a constant, try to pick up the part we want.  Handle a full
         word and low-order part.  Only do this if we are narrowing
         the constant; if it is being widened, we have no idea what
         the extra bits will have been set to.  */

      if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
          && GET_MODE_SIZE (mode) == UNITS_PER_WORD
          && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
          && GET_MODE_CLASS (mode) == MODE_INT)
        {
          temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
                                  0, op0_mode);
          if (temp)
            return temp;
        }

      /* If we want a subreg of a constant, at offset 0,
         take the low bits.  On a little-endian machine, that's
         always valid.  On a big-endian machine, it's valid
         only if the constant's mode fits in one word.  Note that we
         cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode.  */
      if (CONSTANT_P (SUBREG_REG (x))
          && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
               || ! WORDS_BIG_ENDIAN)
              ? SUBREG_WORD (x) == 0
              : (SUBREG_WORD (x)
                 == ((GET_MODE_SIZE (op0_mode)
                      - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
                     / UNITS_PER_WORD)))
          && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
          && (! WORDS_BIG_ENDIAN
              || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
        return gen_lowpart_for_combine (mode, SUBREG_REG (x));

      /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
         since we are saying that the high bits don't matter.  */
      if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
        {
          if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
              && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0))
            return operand_subword (SUBREG_REG (x), SUBREG_WORD (x), 0, mode);
          return SUBREG_REG (x);
        }

      /* Note that we cannot do any narrowing for non-constants since
         we might have been counting on using the fact that some bits were
         zero.  We now do this in the SET.  */

      break;

    case NOT:
      /* (not (plus X -1)) can become (neg X).  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && XEXP (XEXP (x, 0), 1) == constm1_rtx)
        return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));

      /* Similarly, (not (neg X)) is (plus X -1).  */
      if (GET_CODE (XEXP (x, 0)) == NEG)
        return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
                                constm1_rtx);
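      /* Both identities follow from two's complement, where ~Y == -Y - 1:
         ~(X + -1) == -(X - 1) - 1 == -X, and ~(-X) == X - 1.  */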

      /* (not (xor X C)) for C constant is (xor X D) with D = ~C.  */
      if (GET_CODE (XEXP (x, 0)) == XOR
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && (temp = simplify_unary_operation (NOT, mode,
                                               XEXP (XEXP (x, 0), 1),
                                               mode)) != 0)
        return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);

      /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for
         operands other than 1, but that is not valid.  We could do a
         similar simplification for (not (lshiftrt C X)) where C is
         just the sign bit, but this doesn't seem common enough to
         bother with.  */
      if (GET_CODE (XEXP (x, 0)) == ASHIFT
          && XEXP (XEXP (x, 0), 0) == const1_rtx)
        return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
                               XEXP (XEXP (x, 0), 1));

      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && subreg_lowpart_p (XEXP (x, 0))
          && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
              < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
          && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
          && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
        {
          enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));

          x = gen_rtx_ROTATE (inner_mode,
                              gen_unary (NOT, inner_mode, inner_mode,
                                         const1_rtx),
                              XEXP (SUBREG_REG (XEXP (x, 0)), 1));
          return gen_lowpart_for_combine (mode, x);
        }

      /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
         reversing the comparison code if valid.  */
      if (STORE_FLAG_VALUE == -1
          && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
          && (reversed = reversed_comparison (x, mode, XEXP (XEXP (x, 0), 0),
                                              XEXP (XEXP (x, 0), 1))))
        return reversed;

      /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
         is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
         perform the above simplification.  */

      if (STORE_FLAG_VALUE == -1
          && GET_CODE (XEXP (x, 0)) == ASHIFTRT
          && XEXP (x, 1) == const1_rtx
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
        return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);

      /* Apply De Morgan's laws to reduce number of patterns for machines
         with negating logical insns (and-not, nand, etc.).  If result has
         only one NOT, put it first, since that is how the patterns are
         coded.  */

      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
        {
          rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
          enum machine_mode op_mode;

          op_mode = GET_MODE (in1);
          in1 = gen_unary (NOT, op_mode, op_mode, in1);

          op_mode = GET_MODE (in2);
          if (op_mode == VOIDmode)
            op_mode = mode;
          in2 = gen_unary (NOT, op_mode, op_mode, in2);

          if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
            {
              rtx tem = in2;
              in2 = in1; in1 = tem;
            }

          return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
                                  mode, in1, in2);
        }
      break;

    case NEG:
      /* (neg (plus X 1)) can become (not X).  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && XEXP (XEXP (x, 0), 1) == const1_rtx)
        return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));

      /* Similarly, (neg (not X)) is (plus X 1).  */
      if (GET_CODE (XEXP (x, 0)) == NOT)
        return plus_constant (XEXP (XEXP (x, 0), 0), 1);

      /* (neg (minus X Y)) can become (minus Y X).  */
      if (GET_CODE (XEXP (x, 0)) == MINUS
          && (! FLOAT_MODE_P (mode)
              /* x-y != -(y-x) with IEEE floating point.  */
              || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
              || flag_fast_math))
        return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
                           XEXP (XEXP (x, 0), 0));

      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
      if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
          && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
        return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
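      /* Check both values: A == 0 gives (neg (xor 0 1)) == -1 == 0 + -1,
         and A == 1 gives (neg (xor 1 1)) == 0 == 1 + -1.  */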

      /* NEG commutes with ASHIFT since it is multiplication.  Only do this
         if we can then eliminate the NEG (e.g.,
         if the operand is a constant).  */

      if (GET_CODE (XEXP (x, 0)) == ASHIFT)
        {
          temp = simplify_unary_operation (NEG, mode,
                                           XEXP (XEXP (x, 0), 0), mode);
          if (temp)
            {
              SUBST (XEXP (XEXP (x, 0), 0), temp);
              return XEXP (x, 0);
            }
        }

      temp = expand_compound_operation (XEXP (x, 0));

      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
         replaced by (lshiftrt X C).  This will convert
         (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */

      if (GET_CODE (temp) == ASHIFTRT
          && GET_CODE (XEXP (temp, 1)) == CONST_INT
          && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
        return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
                                     INTVAL (XEXP (temp, 1)));

      /* If X has only a single bit that might be nonzero, say, bit I, convert
         (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
         MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
         (sign_extract X 1 Y).  But only do this if TEMP isn't a register
         or a SUBREG of one since we'd be making the expression more
         complex if it was just a register.  */

      if (GET_CODE (temp) != REG
          && ! (GET_CODE (temp) == SUBREG
                && GET_CODE (SUBREG_REG (temp)) == REG)
          && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
        {
          rtx temp1 = simplify_shift_const
            (NULL_RTX, ASHIFTRT, mode,
             simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
                                   GET_MODE_BITSIZE (mode) - 1 - i),
             GET_MODE_BITSIZE (mode) - 1 - i);

          /* If all we did was surround TEMP with the two shifts, we
             haven't improved anything, so don't use it.  Otherwise,
             we are better off with TEMP1.  */
          if (GET_CODE (temp1) != ASHIFTRT
              || GET_CODE (XEXP (temp1, 0)) != ASHIFT
              || XEXP (XEXP (temp1, 0), 0) != temp)
            return temp1;
        }
      break;

    case TRUNCATE:
      /* We can't handle truncation to a partial integer mode here
         because we don't know the real bitsize of the partial
         integer mode.  */
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
        break;

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
        SUBST (XEXP (x, 0),
               force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                              GET_MODE_MASK (mode), NULL_RTX, 0));

      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
      if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
           || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
        return XEXP (XEXP (x, 0), 0);

      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
         (OP:SI foo:SI) if OP is NEG or ABS.  */
      if ((GET_CODE (XEXP (x, 0)) == ABS
           || GET_CODE (XEXP (x, 0)) == NEG)
          && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
              || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
          && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
        return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
                          XEXP (XEXP (XEXP (x, 0), 0), 0));

      /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
         (truncate:SI X).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
          && subreg_lowpart_p (XEXP (x, 0)))
        return SUBREG_REG (XEXP (x, 0));

      /* If we know that the value is already truncated, we can
         replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
         is nonzero for the corresponding modes.  But don't do this
         for an (LSHIFTRT (MULT ...)) since this will cause problems
         with the umulXi3_highpart patterns.  */
      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
          && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
              >= GET_MODE_BITSIZE (mode) + 1)
          && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
                && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
        return gen_lowpart_for_combine (mode, XEXP (x, 0));

      /* A truncate of a comparison can be replaced with a subreg if
         STORE_FLAG_VALUE permits.  This is like the previous test,
         but it works even if the comparison is done in a mode larger
         than HOST_BITS_PER_WIDE_INT.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
        return gen_lowpart_for_combine (mode, XEXP (x, 0));

      /* Similarly, a truncate of a register whose value is a
         comparison can be replaced with a subreg if STORE_FLAG_VALUE
         permits.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
          && (temp = get_last_value (XEXP (x, 0)))
          && GET_RTX_CLASS (GET_CODE (temp)) == '<')
        return gen_lowpart_for_combine (mode, XEXP (x, 0));

      break;

    case FLOAT_TRUNCATE:
      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
        return XEXP (XEXP (x, 0), 0);

      /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
         (OP:SF foo:SF) if OP is NEG or ABS.  */
      if ((GET_CODE (XEXP (x, 0)) == ABS
           || GET_CODE (XEXP (x, 0)) == NEG)
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
          && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
        return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
                          XEXP (XEXP (XEXP (x, 0), 0), 0));

      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
         is (float_truncate:SF X).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && subreg_lowpart_p (XEXP (x, 0))
          && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
        return SUBREG_REG (XEXP (x, 0));
      break;

#ifdef HAVE_cc0
    case COMPARE:
      /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
         using cc0, in which case we want to leave it as a COMPARE
         so we can distinguish it from a register-register-copy.  */
      if (XEXP (x, 1) == const0_rtx)
        return XEXP (x, 0);

      /* In IEEE floating point, x-0 is not the same as x.  */
      if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
           || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
           || flag_fast_math)
          && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
        return XEXP (x, 0);
      break;
#endif

    case CONST:
      /* (const (const X)) can become (const X).  Do it this way rather than
         returning the inner CONST since CONST can be shared with a
         REG_EQUAL note.  */
      if (GET_CODE (XEXP (x, 0)) == CONST)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

#ifdef HAVE_lo_sum
    case LO_SUM:
      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
         can add in an offset.  find_split_point will split this address up
         again if it doesn't match.  */
      if (GET_CODE (XEXP (x, 0)) == HIGH
          && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
        return XEXP (x, 1);
      break;
#endif

    case PLUS:
      /* If we have (plus (plus A const) B), associate it so that the
         constant is outermost.  That's because that's the way indexed
         addresses are supposed to appear.  This code used to check many
         more cases, but they are now checked elsewhere.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
        return gen_binary (PLUS, mode,
                           gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
                                       XEXP (x, 1)),
                           XEXP (XEXP (x, 0), 1));

      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
         when c is (const_int (pow2 + 1) / 2) is a sign extension of a
         bit-field and can be replaced by either a sign_extend or a
         sign_extract.  The `and' may be a zero_extend and the two
         <c>, -<c> constants may be reversed.  */
      if (GET_CODE (XEXP (x, 0)) == XOR
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
          && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
              || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
               && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
               && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
                   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
              || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
                  && (GET_MODE_BITSIZE (GET_MODE
                                        (XEXP (XEXP (XEXP (x, 0), 0), 0)))
                      == (unsigned int) i + 1))))
        return simplify_shift_const
          (NULL_RTX, ASHIFTRT, mode,
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                 XEXP (XEXP (XEXP (x, 0), 0), 0),
                                 GET_MODE_BITSIZE (mode) - (i + 1)),
           GET_MODE_BITSIZE (mode) - (i + 1));
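      /* Worked instance for a 4-bit field (pow2 == 16, c == 8):
         ((v & 15) ^ 8) - 8 maps 0..7 to themselves and 8..15 to -8..-1,
         exactly a 4-bit sign extension; the ASHIFT/ASHIFTRT pair built
         above realizes it with i + 1 == 4.  */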

      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
         C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
         is 1.  This produces better code than the alternative immediately
         below.  */
      if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
          && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
              || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
          && (reversed = reversed_comparison (XEXP (x, 0), mode,
                                              XEXP (XEXP (x, 0), 0),
                                              XEXP (XEXP (x, 0), 1))))
        return gen_unary (NEG, mode, mode, reversed);

      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
         can become (ashiftrt (ashift (xor x 1) C) C) where C is
         the bitsize of the mode - 1.  This allows simplification of
         "a = (b & 8) == 0;"  */
      if (XEXP (x, 1) == constm1_rtx
          && GET_CODE (XEXP (x, 0)) != REG
          && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
          && nonzero_bits (XEXP (x, 0), mode) == 1)
        return simplify_shift_const
          (NULL_RTX, ASHIFTRT, mode,
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                 gen_rtx_combine (XOR, mode,
                                                  XEXP (x, 0), const1_rtx),
                                 GET_MODE_BITSIZE (mode) - 1),
           GET_MODE_BITSIZE (mode) - 1);

      /* If we are adding two things that have no bits in common, convert
         the addition into an IOR.  This will often be further simplified,
         for example in cases like ((a & 1) + (a & 2)), which can
         become a & 3.  */

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode)
              & nonzero_bits (XEXP (x, 1), mode)) == 0)
        {
          /* Try to simplify the expression further.  */
          rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
          temp = combine_simplify_rtx (tor, mode, last, in_dest);

          /* If we could, great.  If not, do not go ahead with the IOR
             replacement, since PLUS appears in many special purpose
             address arithmetic instructions.  */
          if (GET_CODE (temp) != CLOBBER && temp != tor)
            return temp;
        }
      break;

    case MINUS:
      /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
         by reversing the comparison code if valid.  */
      if (STORE_FLAG_VALUE == 1
          && XEXP (x, 0) == const1_rtx
          && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
          && (reversed = reversed_comparison (XEXP (x, 1), mode,
                                              XEXP (XEXP (x, 1), 0),
                                              XEXP (XEXP (x, 1), 1))))
        return reversed;

      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
         (and <foo> (const_int pow2-1)).  */
      if (GET_CODE (XEXP (x, 1)) == AND
          && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
          && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
          && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
        return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
                                       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
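      /* Numeric check with pow2 == 8: (x & -8) clears the low three bits,
         so x - (x & -8) is exactly (x & 7).  */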

      /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
         integers.  */
      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
        return gen_binary (MINUS, mode,
                           gen_binary (MINUS, mode, XEXP (x, 0),
                                       XEXP (XEXP (x, 1), 0)),
                           XEXP (XEXP (x, 1), 1));
      break;

    case MULT:
      /* If we have (mult (plus A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  This
         occurs mostly in addresses, often when unrolling loops.  */

      if (GET_CODE (XEXP (x, 0)) == PLUS)
        {
          x = apply_distributive_law
            (gen_binary (PLUS, mode,
                         gen_binary (MULT, mode,
                                     XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
                         gen_binary (MULT, mode,
                                     XEXP (XEXP (x, 0), 1),
                                     copy_rtx (XEXP (x, 1)))));

          if (GET_CODE (x) != MULT)
            return x;
        }
      break;

    case UDIV:
      /* If this is a divide by a power of two, treat it as a shift if
         its first operand is a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
          && (GET_CODE (XEXP (x, 0)) == ASHIFT
              || GET_CODE (XEXP (x, 0)) == LSHIFTRT
              || GET_CODE (XEXP (x, 0)) == ASHIFTRT
              || GET_CODE (XEXP (x, 0)) == ROTATE
              || GET_CODE (XEXP (x, 0)) == ROTATERT))
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
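      /* E.g. (udiv (lshiftrt X (const_int 2)) (const_int 8)) becomes
         (lshiftrt (lshiftrt X 2) 3), which simplify_shift_const can merge
         into a single logical shift right by 5.  */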
4356 break;
4357
4358 case EQ: case NE:
4359 case GT: case GTU: case GE: case GEU:
4360 case LT: case LTU: case LE: case LEU:
4361 case UNEQ: case LTGT:
4362 case UNGT: case UNGE:
4363 case UNLT: case UNLE:
4364 case UNORDERED: case ORDERED:
4365 /* If the first operand is a condition code, we can't do anything
4366 with it. */
4367 if (GET_CODE (XEXP (x, 0)) == COMPARE
4368 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4369#ifdef HAVE_cc0
4370 && XEXP (x, 0) != cc0_rtx
4371#endif
663522cb 4372 ))
4373 {
4374 rtx op0 = XEXP (x, 0);
4375 rtx op1 = XEXP (x, 1);
4376 enum rtx_code new_code;
4377
4378 if (GET_CODE (op0) == COMPARE)
4379 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4380
4381 /* Simplify our comparison, if possible. */
4382 new_code = simplify_comparison (code, &op0, &op1);
4383
230d793d 4384 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 4385 if only the low-order bit is possibly nonzero in X (such as when
4386 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4387 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4388 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4389 (plus X 1).
4390
4391 Remove any ZERO_EXTRACT we made when thinking this was a
4392 comparison. It may now be simpler to use, e.g., an AND. If a
4393 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4394 the call to make_compound_operation in the SET case. */
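 /* Added illustration (STORE_FLAG_VALUE == 1): if X is known to be 0 or 1,
    then (ne X 0) -> X and (eq X 0) -> (xor X 1); if X is known to be
    0 or -1, then (ne X 0) -> (neg X) and (eq X 0) -> (plus X 1).  */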
4395
4396 if (STORE_FLAG_VALUE == 1
4397 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4398 && op1 == const0_rtx
4399 && mode == GET_MODE (op0)
4400 && nonzero_bits (op0, mode) == 1)
4401 return gen_lowpart_for_combine (mode,
4402 expand_compound_operation (op0));
5109d49f 4403
4404 else if (STORE_FLAG_VALUE == 1
4405 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4406 && op1 == const0_rtx
a191f0ee 4407 && mode == GET_MODE (op0)
4408 && (num_sign_bit_copies (op0, mode)
4409 == GET_MODE_BITSIZE (mode)))
4410 {
4411 op0 = expand_compound_operation (op0);
0c1c8ea6 4412 return gen_unary (NEG, mode, mode,
8079805d 4413 gen_lowpart_for_combine (mode, op0));
4414 }
4415
4416 else if (STORE_FLAG_VALUE == 1
4417 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4418 && op1 == const0_rtx
a191f0ee 4419 && mode == GET_MODE (op0)
5109d49f 4420 && nonzero_bits (op0, mode) == 1)
4421 {
4422 op0 = expand_compound_operation (op0);
4423 return gen_binary (XOR, mode,
4424 gen_lowpart_for_combine (mode, op0),
4425 const1_rtx);
5109d49f 4426 }
818b11b9 4427
4428 else if (STORE_FLAG_VALUE == 1
4429 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4430 && op1 == const0_rtx
a191f0ee 4431 && mode == GET_MODE (op0)
4432 && (num_sign_bit_copies (op0, mode)
4433 == GET_MODE_BITSIZE (mode)))
4434 {
4435 op0 = expand_compound_operation (op0);
8079805d 4436 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9 4437 }
230d793d 4438
4439 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4440 those above. */
4441 if (STORE_FLAG_VALUE == -1
4442 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4443 && op1 == const0_rtx
4444 && (num_sign_bit_copies (op0, mode)
4445 == GET_MODE_BITSIZE (mode)))
4446 return gen_lowpart_for_combine (mode,
4447 expand_compound_operation (op0));
4448
4449 else if (STORE_FLAG_VALUE == -1
4450 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4451 && op1 == const0_rtx
a191f0ee 4452 && mode == GET_MODE (op0)
4453 && nonzero_bits (op0, mode) == 1)
4454 {
4455 op0 = expand_compound_operation (op0);
0c1c8ea6 4456 return gen_unary (NEG, mode, mode,
8079805d 4457 gen_lowpart_for_combine (mode, op0));
4458 }
4459
4460 else if (STORE_FLAG_VALUE == -1
4461 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4462 && op1 == const0_rtx
a191f0ee 4463 && mode == GET_MODE (op0)
4464 && (num_sign_bit_copies (op0, mode)
4465 == GET_MODE_BITSIZE (mode)))
230d793d 4466 {
818b11b9 4467 op0 = expand_compound_operation (op0);
0c1c8ea6 4468 return gen_unary (NOT, mode, mode,
8079805d 4469 gen_lowpart_for_combine (mode, op0));
4470 }
4471
4472 /* If X is 0/1, (eq X 0) is X-1. */
4473 else if (STORE_FLAG_VALUE == -1
4474 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 4475 && op1 == const0_rtx
a191f0ee 4476 && mode == GET_MODE (op0)
4477 && nonzero_bits (op0, mode) == 1)
4478 {
4479 op0 = expand_compound_operation (op0);
8079805d 4480 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d 4481 }
4482
4483 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4484 one bit that might be nonzero, we can convert (ne x 0) to
4485 (ashift x c) where C puts the bit in the sign bit. Remove any
4486 AND with STORE_FLAG_VALUE when we are done, since we are only
4487 going to test the sign bit. */
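 /* Added illustration: in SImode with STORE_FLAG_VALUE == 0x80000000,
    if nonzero_bits (X) == 8 (only bit 3 can be set), then
    (ne X 0) -> (ashift X 28), moving bit 3 into the sign bit.  */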
3f508eca 4488 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 4489 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 4490 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
e51712db 4491 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
4492 && op1 == const0_rtx
4493 && mode == GET_MODE (op0)
5109d49f 4494 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 4495 {
4496 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4497 expand_compound_operation (op0),
4498 GET_MODE_BITSIZE (mode) - 1 - i);
4499 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4500 return XEXP (x, 0);
4501 else
4502 return x;
4503 }
4504
4505 /* If the code changed, return a whole new comparison. */
4506 if (new_code != code)
4507 return gen_rtx_combine (new_code, mode, op0, op1);
4508
663522cb 4509 /* Otherwise, keep this operation, but maybe change its operands.
4510 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4511 SUBST (XEXP (x, 0), op0);
4512 SUBST (XEXP (x, 1), op1);
4513 }
4514 break;
663522cb 4515
230d793d 4516 case IF_THEN_ELSE:
8079805d 4517 return simplify_if_then_else (x);
9210df58 4518
4519 case ZERO_EXTRACT:
4520 case SIGN_EXTRACT:
4521 case ZERO_EXTEND:
4522 case SIGN_EXTEND:
0f41302f 4523 /* If we are processing SET_DEST, we are done. */
4524 if (in_dest)
4525 return x;
d0ab8cd3 4526
8079805d 4527 return expand_compound_operation (x);
d0ab8cd3 4528
4529 case SET:
4530 return simplify_set (x);
1a26b032 4531
4532 case AND:
4533 case IOR:
4534 case XOR:
4535 return simplify_logical (x, last);
d0ab8cd3 4536
663522cb 4537 case ABS:
4538 /* (abs (neg <foo>)) -> (abs <foo>) */
4539 if (GET_CODE (XEXP (x, 0)) == NEG)
4540 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1a26b032 4541
4542 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4543 do nothing. */
4544 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4545 break;
f40421ce 4546
4547 /* If operand is something known to be positive, ignore the ABS. */
4548 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4549 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4550 <= HOST_BITS_PER_WIDE_INT)
4551 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4552 & ((HOST_WIDE_INT) 1
4553 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4554 == 0)))
4555 return XEXP (x, 0);
1a26b032 4556
4557 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4558 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4559 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
1a26b032 4560
8079805d 4561 break;
1a26b032 4562
4563 case FFS:
4564 /* (ffs (*_extend <X>)) = (ffs <X>) */
4565 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4566 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4567 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4568 break;
1a26b032 4569
4570 case FLOAT:
4571 /* (float (sign_extend <X>)) = (float <X>). */
4572 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4573 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4574 break;
1a26b032 4575
4576 case ASHIFT:
4577 case LSHIFTRT:
4578 case ASHIFTRT:
4579 case ROTATE:
4580 case ROTATERT:
4581 /* If this is a shift by a constant amount, simplify it. */
4582 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
663522cb 4583 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4584 INTVAL (XEXP (x, 1)));
4585
4586#ifdef SHIFT_COUNT_TRUNCATED
4587 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4588 SUBST (XEXP (x, 1),
4589 force_to_mode (XEXP (x, 1), GET_MODE (x),
663522cb 4590 ((HOST_WIDE_INT) 1
4591 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4592 - 1,
4593 NULL_RTX, 0));
4594#endif
4595
4596 break;
e9a25f70 4597
4598 case VEC_SELECT:
4599 {
4600 rtx op0 = XEXP (x, 0);
4601 rtx op1 = XEXP (x, 1);
4602 int len;
4603
4604 if (GET_CODE (op1) != PARALLEL)
4605 abort ();
4606 len = XVECLEN (op1, 0);
4607 if (len == 1
4608 && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4609 && GET_CODE (op0) == VEC_CONCAT)
4610 {
4611 int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4612
4613 /* Try to find the element in the VEC_CONCAT. */
4614 for (;;)
4615 {
4616 if (GET_MODE (op0) == GET_MODE (x))
4617 return op0;
4618 if (GET_CODE (op0) == VEC_CONCAT)
4619 {
4620 HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4622 if (offset < op0_size)
4622 op0 = XEXP (op0, 0);
4623 else
4624 {
4625 offset -= op0_size;
4626 op0 = XEXP (op0, 1);
4627 }
4628 }
4629 else
4630 break;
4631 }
4632 }
4633 }
4634
4635 break;
4636
4637 default:
4638 break;
4639 }
4640
4641 return x;
4642}
4643\f
4644/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
5109d49f 4645
4646static rtx
4647simplify_if_then_else (x)
4648 rtx x;
4649{
4650 enum machine_mode mode = GET_MODE (x);
4651 rtx cond = XEXP (x, 0);
4652 rtx true = XEXP (x, 1);
4653 rtx false = XEXP (x, 2);
4654 enum rtx_code true_code = GET_CODE (cond);
4655 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4656 rtx temp;
4657 int i;
4658 enum rtx_code false_code;
4659 rtx reversed;
8079805d 4660
0f41302f 4661 /* Simplify storing of the truth value. */
4662 if (comparison_p && true == const_true_rtx && false == const0_rtx)
4663 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
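  /* Added illustration: with STORE_FLAG_VALUE == 1 (const_true_rtx is
     const1_rtx), (if_then_else (lt A B) (const_int 1) (const_int 0))
     becomes just (lt A B).  */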
663522cb 4664
0f41302f 4665 /* Also when the truth value has to be reversed. */
4666 if (comparison_p
4667 && true == const0_rtx && false == const_true_rtx
4668 && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
4669 XEXP (cond, 1))))
4670 return reversed;
4671
4672 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4673 in it is being compared against certain values. Get the true and false
4674 comparisons and see if that says anything about the value of each arm. */
4675
4676 if (comparison_p
4677 && ((false_code = combine_reversed_comparison_code (cond))
4678 != UNKNOWN)
4679 && GET_CODE (XEXP (cond, 0)) == REG)
4680 {
4681 HOST_WIDE_INT nzb;
4682 rtx from = XEXP (cond, 0);
4683 rtx true_val = XEXP (cond, 1);
4684 rtx false_val = true_val;
4685 int swapped = 0;
9210df58 4686
8079805d 4687 /* If FALSE_CODE is EQ, swap the codes and arms. */
5109d49f 4688
8079805d 4689 if (false_code == EQ)
1a26b032 4690 {
4691 swapped = 1, true_code = EQ, false_code = NE;
4692 temp = true, true = false, false = temp;
4693 }
5109d49f 4694
4695 /* If we are comparing against zero and the expression being tested has
4696 only a single bit that might be nonzero, that is its value when it is
4697 not equal to zero. Similarly if it is known to be -1 or 0. */
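 /* Added illustration: if the condition is (eq X 0) and
    nonzero_bits (X) == 4, then X can only be 0 or 4, so in the false
    arm every use of X can be replaced by (const_int 4).  */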
4698
4699 if (true_code == EQ && true_val == const0_rtx
4700 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4701 false_code = EQ, false_val = GEN_INT (nzb);
4702 else if (true_code == EQ && true_val == const0_rtx
4703 && (num_sign_bit_copies (from, GET_MODE (from))
4704 == GET_MODE_BITSIZE (GET_MODE (from))))
4705 false_code = EQ, false_val = constm1_rtx;
4706
4707 /* Now simplify an arm if we know the value of the register in the
4708 branch and it is used in the arm. Be careful due to the potential
4709 of locally-shared RTL. */
4710
4711 if (reg_mentioned_p (from, true))
4712 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
4713 pc_rtx, pc_rtx, 0, 0);
4714 if (reg_mentioned_p (from, false))
4715 false = subst (known_cond (copy_rtx (false), false_code,
4716 from, false_val),
4717 pc_rtx, pc_rtx, 0, 0);
4718
4719 SUBST (XEXP (x, 1), swapped ? false : true);
4720 SUBST (XEXP (x, 2), swapped ? true : false);
4721
4722 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4723 }
5109d49f 4724
4725 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4726 reversed, do so to avoid needing two sets of patterns for
4727 subtract-and-branch insns. Similarly if we have a constant in the true
4728 arm, the false arm is the same as the first operand of the comparison, or
4729 the false arm is more complicated than the true arm. */
4730
4731 if (comparison_p
4732 && combine_reversed_comparison_code (cond) != UNKNOWN
663522cb 4733 && (true == pc_rtx
4734 || (CONSTANT_P (true)
4735 && GET_CODE (false) != CONST_INT && false != pc_rtx)
4736 || true == const0_rtx
4737 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4738 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4739 || (GET_CODE (true) == SUBREG
4740 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4741 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4742 || reg_mentioned_p (true, false)
4743 || rtx_equal_p (false, XEXP (cond, 0))))
4744 {
9a915772 4745 true_code = reversed_comparison_code (cond, NULL);
8079805d 4746 SUBST (XEXP (x, 0),
4747 reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
4748 XEXP (cond, 1)));
5109d49f 4749
4750 SUBST (XEXP (x, 1), false);
4751 SUBST (XEXP (x, 2), true);
1a26b032 4752
8079805d 4753 temp = true, true = false, false = temp, cond = XEXP (x, 0);
bb821298 4754
0f41302f 4755 /* It is possible that the conditional has been simplified out. */
4756 true_code = GET_CODE (cond);
4757 comparison_p = GET_RTX_CLASS (true_code) == '<';
8079805d 4758 }
abe6e52f 4759
8079805d 4760 /* If the two arms are identical, we don't need the comparison. */
1a26b032 4761
4762 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4763 return true;
1a26b032 4764
4765 /* Convert a == b ? b : a to "a". */
4766 if (true_code == EQ && ! side_effects_p (cond)
6ff85fd8 4767 && (! FLOAT_MODE_P (mode) || flag_fast_math)
4768 && rtx_equal_p (XEXP (cond, 0), false)
4769 && rtx_equal_p (XEXP (cond, 1), true))
4770 return false;
4771 else if (true_code == NE && ! side_effects_p (cond)
6ff85fd8 4772 && (! FLOAT_MODE_P (mode) || flag_fast_math)
4773 && rtx_equal_p (XEXP (cond, 0), true)
4774 && rtx_equal_p (XEXP (cond, 1), false))
4775 return true;
4776
4777 /* Look for cases where we have (abs x) or (neg (abs X)). */
4778
4779 if (GET_MODE_CLASS (mode) == MODE_INT
4780 && GET_CODE (false) == NEG
4781 && rtx_equal_p (true, XEXP (false, 0))
4782 && comparison_p
4783 && rtx_equal_p (true, XEXP (cond, 0))
4784 && ! side_effects_p (true))
4785 switch (true_code)
4786 {
4787 case GT:
4788 case GE:
0c1c8ea6 4789 return gen_unary (ABS, mode, mode, true);
4790 case LT:
4791 case LE:
0c1c8ea6 4792 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
4793 default:
4794 break;
4795 }
4796
4797 /* Look for MIN or MAX. */
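 /* Added illustration: (if_then_else (ge A B) A B) -> (smax A B) and
    (if_then_else (ltu A B) A B) -> (umin A B).  */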
4798
34c8be72 4799 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
4800 && comparison_p
4801 && rtx_equal_p (XEXP (cond, 0), true)
4802 && rtx_equal_p (XEXP (cond, 1), false)
4803 && ! side_effects_p (cond))
4804 switch (true_code)
4805 {
4806 case GE:
4807 case GT:
4808 return gen_binary (SMAX, mode, true, false);
4809 case LE:
4810 case LT:
4811 return gen_binary (SMIN, mode, true, false);
4812 case GEU:
4813 case GTU:
4814 return gen_binary (UMAX, mode, true, false);
4815 case LEU:
4816 case LTU:
4817 return gen_binary (UMIN, mode, true, false);
4818 default:
4819 break;
8079805d 4820 }
663522cb 4821
4822 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4823 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4824 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4825 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4826 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
0802d516 4827 neither 1 nor -1, but it isn't worth checking for. */
8079805d 4828
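 /* Added illustration (STORE_FLAG_VALUE == 1):
    (if_then_else COND (plus Z (const_int 4)) Z)
    -> (plus Z (mult COND (const_int 4))), since the multiply is 4 when
    COND holds and 0 when it does not.  */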
4829 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4830 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
4831 {
4832 rtx t = make_compound_operation (true, SET);
4833 rtx f = make_compound_operation (false, SET);
4834 rtx cond_op0 = XEXP (cond, 0);
4835 rtx cond_op1 = XEXP (cond, 1);
6a651371 4836 enum rtx_code op = NIL, extend_op = NIL;
8079805d 4837 enum machine_mode m = mode;
6a651371 4838 rtx z = 0, c1 = NULL_RTX;
8079805d 4839
4840 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4841 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4842 || GET_CODE (t) == ASHIFT
4843 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4844 && rtx_equal_p (XEXP (t, 0), f))
4845 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4846
4847 /* If an identity-zero op is commutative, check whether there
0f41302f 4848 would be a match if we swapped the operands. */
4849 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4850 || GET_CODE (t) == XOR)
4851 && rtx_equal_p (XEXP (t, 1), f))
4852 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4853 else if (GET_CODE (t) == SIGN_EXTEND
4854 && (GET_CODE (XEXP (t, 0)) == PLUS
4855 || GET_CODE (XEXP (t, 0)) == MINUS
4856 || GET_CODE (XEXP (t, 0)) == IOR
4857 || GET_CODE (XEXP (t, 0)) == XOR
4858 || GET_CODE (XEXP (t, 0)) == ASHIFT
4859 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4860 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4861 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4862 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4863 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4864 && (num_sign_bit_copies (f, GET_MODE (f))
4865 > (GET_MODE_BITSIZE (mode)
4866 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4867 {
4868 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4869 extend_op = SIGN_EXTEND;
4870 m = GET_MODE (XEXP (t, 0));
1a26b032 4871 }
4872 else if (GET_CODE (t) == SIGN_EXTEND
4873 && (GET_CODE (XEXP (t, 0)) == PLUS
4874 || GET_CODE (XEXP (t, 0)) == IOR
4875 || GET_CODE (XEXP (t, 0)) == XOR)
4876 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4877 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4878 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4879 && (num_sign_bit_copies (f, GET_MODE (f))
4880 > (GET_MODE_BITSIZE (mode)
4881 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4882 {
4883 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4884 extend_op = SIGN_EXTEND;
4885 m = GET_MODE (XEXP (t, 0));
4886 }
4887 else if (GET_CODE (t) == ZERO_EXTEND
4888 && (GET_CODE (XEXP (t, 0)) == PLUS
4889 || GET_CODE (XEXP (t, 0)) == MINUS
4890 || GET_CODE (XEXP (t, 0)) == IOR
4891 || GET_CODE (XEXP (t, 0)) == XOR
4892 || GET_CODE (XEXP (t, 0)) == ASHIFT
4893 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4894 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4895 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4896 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4897 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4898 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4899 && ((nonzero_bits (f, GET_MODE (f))
663522cb 4900 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4901 == 0))
4902 {
4903 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4904 extend_op = ZERO_EXTEND;
4905 m = GET_MODE (XEXP (t, 0));
4906 }
4907 else if (GET_CODE (t) == ZERO_EXTEND
4908 && (GET_CODE (XEXP (t, 0)) == PLUS
4909 || GET_CODE (XEXP (t, 0)) == IOR
4910 || GET_CODE (XEXP (t, 0)) == XOR)
4911 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4912 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4913 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4914 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4915 && ((nonzero_bits (f, GET_MODE (f))
663522cb 4916 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4917 == 0))
4918 {
4919 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4920 extend_op = ZERO_EXTEND;
4921 m = GET_MODE (XEXP (t, 0));
4922 }
663522cb 4923
4924 if (z)
4925 {
4926 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4927 pc_rtx, pc_rtx, 0, 0);
4928 temp = gen_binary (MULT, m, temp,
4929 gen_binary (MULT, m, c1, const_true_rtx));
4930 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4931 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4932
4933 if (extend_op != NIL)
0c1c8ea6 4934 temp = gen_unary (extend_op, mode, m, temp);
8079805d
RK
4935
4936 return temp;
4937 }
4938 }
224eeff2 4939
4940 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4941 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4942 negation of a single bit, we can convert this operation to a shift. We
4943 can actually do this more generally, but it doesn't seem worth it. */
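 /* Added illustration: if A is known to be 0 or 1,
    (if_then_else (ne A 0) (const_int 8) (const_int 0)) -> (ashift A 3);
    if A is known to be 0 or -1, (const_int -8) works the same way
    because (-1) << 3 == -8.  */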
4944
4945 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4946 && false == const0_rtx && GET_CODE (true) == CONST_INT
4947 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4948 && (i = exact_log2 (INTVAL (true))) >= 0)
4949 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4950 == GET_MODE_BITSIZE (mode))
663522cb 4951 && (i = exact_log2 (-INTVAL (true))) >= 0)))
4952 return
4953 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4954 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
230d793d 4955
4956 return x;
4957}
4958\f
4959/* Simplify X, a SET expression. Return the new expression. */
230d793d 4960
4961static rtx
4962simplify_set (x)
4963 rtx x;
4964{
4965 rtx src = SET_SRC (x);
4966 rtx dest = SET_DEST (x);
4967 enum machine_mode mode
4968 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4969 rtx other_insn;
4970 rtx *cc_use;
4971
4972 /* (set (pc) (return)) gets written as (return). */
4973 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4974 return src;
230d793d 4975
4976 /* Now that we know for sure which bits of SRC we are using, see if we can
4977 simplify the expression for the object knowing that we only need the
4978 low-order bits. */
4979
4980 if (GET_MODE_CLASS (mode) == MODE_INT)
c5c76735 4981 {
e8dc6d50 4982 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
4983 SUBST (SET_SRC (x), src);
4984 }
87e3e0c1 4985
4986 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4987 the comparison result and try to simplify it unless we already have used
4988 undobuf.other_insn. */
4989 if ((GET_CODE (src) == COMPARE
230d793d 4990#ifdef HAVE_cc0
8079805d 4991 || dest == cc0_rtx
230d793d 4992#endif
4993 )
4994 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4995 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4996 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
c0d3ac4d 4997 && rtx_equal_p (XEXP (*cc_use, 0), dest))
4998 {
4999 enum rtx_code old_code = GET_CODE (*cc_use);
5000 enum rtx_code new_code;
5001 rtx op0, op1;
5002 int other_changed = 0;
5003 enum machine_mode compare_mode = GET_MODE (dest);
5004
5005 if (GET_CODE (src) == COMPARE)
5006 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
5007 else
5008 op0 = src, op1 = const0_rtx;
230d793d 5009
8079805d
RK
5010 /* Simplify our comparison, if possible. */
5011 new_code = simplify_comparison (old_code, &op0, &op1);
230d793d 5012
c141a106 5013#ifdef EXTRA_CC_MODES
5014 /* If this machine has CC modes other than CCmode, check to see if we
5015 need to use a different CC mode here. */
5016 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
c141a106 5017#endif /* EXTRA_CC_MODES */
230d793d 5018
c141a106 5019#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
5020 /* If the mode changed, we have to change SET_DEST, the mode in the
5021 compare, and the mode in the place SET_DEST is used. If SET_DEST is
5022 a hard register, just build new versions with the proper mode. If it
5023 is a pseudo, we lose unless it is the only time we set the pseudo, in
5024 which case we can safely change its mode. */
5025 if (compare_mode != GET_MODE (dest))
5026 {
770ae6cc 5027 unsigned int regno = REGNO (dest);
38a448ca 5028 rtx new_dest = gen_rtx_REG (compare_mode, regno);
5029
5030 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 5031 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
230d793d 5032 {
5033 if (regno >= FIRST_PSEUDO_REGISTER)
5034 SUBST (regno_reg_rtx[regno], new_dest);
230d793d 5035
5036 SUBST (SET_DEST (x), new_dest);
5037 SUBST (XEXP (*cc_use, 0), new_dest);
5038 other_changed = 1;
230d793d 5039
8079805d 5040 dest = new_dest;
230d793d 5041 }
8079805d 5042 }
5043#endif
5044
5045 /* If the code changed, we have to build a new comparison in
5046 undobuf.other_insn. */
5047 if (new_code != old_code)
5048 {
5049 unsigned HOST_WIDE_INT mask;
5050
5051 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
5052 dest, const0_rtx));
5053
5054 /* If the only change we made was to change an EQ into an NE or
5055 vice versa, OP0 has only one bit that might be nonzero, and OP1
5056 is zero, check if changing the user of the condition code will
5057 produce a valid insn. If it won't, we can keep the original code
5058 in that insn by surrounding our operation with an XOR. */
5059
5060 if (((old_code == NE && new_code == EQ)
5061 || (old_code == EQ && new_code == NE))
5062 && ! other_changed && op1 == const0_rtx
5063 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5064 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
230d793d 5065 {
8079805d 5066 rtx pat = PATTERN (other_insn), note = 0;
230d793d 5067
8e2f6e35 5068 if ((recog_for_combine (&pat, other_insn, &note) < 0
5069 && ! check_asm_operands (pat)))
5070 {
5071 PUT_CODE (*cc_use, old_code);
5072 other_insn = 0;
230d793d 5073
8079805d 5074 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
230d793d 5075 }
5076 }
5077
5078 other_changed = 1;
5079 }
5080
5081 if (other_changed)
5082 undobuf.other_insn = other_insn;
5083
5084#ifdef HAVE_cc0
5085 /* If we are now comparing against zero, change our source if
5086 needed. If we do not use cc0, we always have a COMPARE. */
5087 if (op1 == const0_rtx && dest == cc0_rtx)
5088 {
5089 SUBST (SET_SRC (x), op0);
5090 src = op0;
5091 }
5092 else
5093#endif
5094
5095 /* Otherwise, if we didn't previously have a COMPARE in the
5096 correct mode, we need one. */
5097 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5098 {
5099 SUBST (SET_SRC (x),
5100 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
5101 src = SET_SRC (x);
5102 }
5103 else
5104 {
5105 /* Otherwise, update the COMPARE if needed. */
5106 SUBST (XEXP (src, 0), op0);
5107 SUBST (XEXP (src, 1), op1);
230d793d 5108 }
5109 }
5110 else
5111 {
5112 /* Get SET_SRC in a form where we have placed back any
5113 compound expressions. Then do the checks below. */
5114 src = make_compound_operation (src, SET);
5115 SUBST (SET_SRC (x), src);
5116 }
230d793d 5117
5118 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5119 and X being a REG or (subreg (reg)), we may be able to convert this to
663522cb 5120 (set (subreg:m2 x) (op)).
df62f951 5121
5122 We can always do this if M1 is narrower than M2 because that means that
5123 we only care about the low bits of the result.
df62f951 5124
8079805d 5125 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
9ec36da5 5126 perform a narrower operation than requested since the high-order bits will
5127 be undefined. On machines where it is defined, this transformation is safe
5128 as long as M1 and M2 have the same number of words. */
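 /* Added illustration: (set x:QI (subreg:QI (plus:SI a b) 0)) can become
    (set (subreg:SI x) (plus:SI a b)), since QImode only needs the
    low-order bits of the SImode sum.  */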
663522cb 5129
5130 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5131 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5132 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5133 / UNITS_PER_WORD)
5134 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5135 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
8baf60bb 5136#ifndef WORD_REGISTER_OPERATIONS
5137 && (GET_MODE_SIZE (GET_MODE (src))
5138 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
df62f951 5139#endif
02188693 5140#ifdef CLASS_CANNOT_CHANGE_MODE
5141 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5142 && (TEST_HARD_REG_BIT
02188693 5143 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
f507a070 5144 REGNO (dest)))
5145 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (src),
5146 GET_MODE (SUBREG_REG (src))))
663522cb 5147#endif
5148 && (GET_CODE (dest) == REG
5149 || (GET_CODE (dest) == SUBREG
5150 && GET_CODE (SUBREG_REG (dest)) == REG)))
5151 {
5152 SUBST (SET_DEST (x),
5153 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5154 dest));
5155 SUBST (SET_SRC (x), SUBREG_REG (src));
5156
5157 src = SET_SRC (x), dest = SET_DEST (x);
5158 }
df62f951 5159
8baf60bb 5160#ifdef LOAD_EXTEND_OP
5161 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5162 would require a paradoxical subreg. Replace the subreg with a
0f41302f 5163 zero_extend to avoid the reload that would otherwise be required. */
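 /* Added illustration (assuming a target whose QImode loads zero-extend):
    (set foo:SI (subreg:SI (mem:QI addr) 0)) would need a paradoxical
    subreg, so it is rewritten as
    (set foo:SI (zero_extend:SI (mem:QI addr))).  */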
5164
5165 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5166 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
5167 && SUBREG_WORD (src) == 0
5168 && (GET_MODE_SIZE (GET_MODE (src))
5169 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5170 && GET_CODE (SUBREG_REG (src)) == MEM)
5171 {
5172 SUBST (SET_SRC (x),
5173 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5174 GET_MODE (src), XEXP (src, 0)));
5175
5176 src = SET_SRC (x);
5177 }
5178#endif
5179
5180 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5181 are comparing an item known to be 0 or -1 against 0, use a logical
5182 operation instead. Check for one of the arms being an IOR of the other
5183 arm with some value. We compute three terms to be IOR'ed together. In
5184 practice, at most two will be nonzero. Then we do the IOR's. */
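 /* Added illustration: with A known to be 0 or -1,
    (if_then_else (ne A 0) B C) -> (ior (and A B) (and (not A) C));
    when A is -1 this is B, when A is 0 it is C.  */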
5185
5186 if (GET_CODE (dest) != PC
5187 && GET_CODE (src) == IF_THEN_ELSE
36b8d792 5188 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5189 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5190 && XEXP (XEXP (src, 0), 1) == const0_rtx
6dd49058 5191 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5192#ifdef HAVE_conditional_move
5193 && ! can_conditionally_move_p (GET_MODE (src))
5194#endif
5195 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5196 GET_MODE (XEXP (XEXP (src, 0), 0)))
5197 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5198 && ! side_effects_p (src))
5199 {
5200 rtx true = (GET_CODE (XEXP (src, 0)) == NE
5201 ? XEXP (src, 1) : XEXP (src, 2));
5202 rtx false = (GET_CODE (XEXP (src, 0)) == NE
5203 ? XEXP (src, 2) : XEXP (src, 1));
5204 rtx term1 = const0_rtx, term2, term3;
5205
5206 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
5207 term1 = false, true = XEXP (true, 1), false = const0_rtx;
5208 else if (GET_CODE (true) == IOR
5209 && rtx_equal_p (XEXP (true, 1), false))
5210 term1 = false, true = XEXP (true, 0), false = const0_rtx;
5211 else if (GET_CODE (false) == IOR
5212 && rtx_equal_p (XEXP (false, 0), true))
5213 term1 = true, false = XEXP (false, 1), true = const0_rtx;
5214 else if (GET_CODE (false) == IOR
5215 && rtx_equal_p (XEXP (false, 1), true))
5216 term1 = true, false = XEXP (false, 0), true = const0_rtx;
5217
5218 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
5219 term3 = gen_binary (AND, GET_MODE (src),
0c1c8ea6 5220 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
5221 XEXP (XEXP (src, 0), 0)),
5222 false);
5223
5224 SUBST (SET_SRC (x),
5225 gen_binary (IOR, GET_MODE (src),
5226 gen_binary (IOR, GET_MODE (src), term1, term2),
5227 term3));
5228
5229 src = SET_SRC (x);
5230 }
230d793d 5231
5232 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5233 whole thing fail. */
5234 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5235 return src;
5236 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5237 return dest;
5238 else
5239 /* Convert this into a field assignment operation, if possible. */
5240 return make_field_assignment (x);
5241}
5242\f
5243/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
5244 result. LAST is nonzero if this is the last retry. */
5245
5246static rtx
5247simplify_logical (x, last)
5248 rtx x;
5249 int last;
5250{
5251 enum machine_mode mode = GET_MODE (x);
5252 rtx op0 = XEXP (x, 0);
5253 rtx op1 = XEXP (x, 1);
9a915772 5254 rtx reversed;
5255
5256 switch (GET_CODE (x))
5257 {
230d793d 5258 case AND:
663522cb 5259 /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5260 insn (and may simplify more). */
5261 if (GET_CODE (op0) == XOR
5262 && rtx_equal_p (XEXP (op0, 0), op1)
5263 && ! side_effects_p (op1))
5264 x = gen_binary (AND, mode,
5265 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
5266
5267 if (GET_CODE (op0) == XOR
5268 && rtx_equal_p (XEXP (op0, 1), op1)
5269 && ! side_effects_p (op1))
5270 x = gen_binary (AND, mode,
5271 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
8079805d 5272
663522cb 5273 /* Similarly for (~(A ^ B)) & A. */
5274 if (GET_CODE (op0) == NOT
5275 && GET_CODE (XEXP (op0, 0)) == XOR
5276 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5277 && ! side_effects_p (op1))
5278 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5279
5280 if (GET_CODE (op0) == NOT
5281 && GET_CODE (XEXP (op0, 0)) == XOR
5282 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5283 && ! side_effects_p (op1))
5284 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5285
5286 /* We can call simplify_and_const_int only if we don't lose
5287 any (sign) bits when converting INTVAL (op1) to
5288 "unsigned HOST_WIDE_INT". */
5289 if (GET_CODE (op1) == CONST_INT
5290 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5291 || INTVAL (op1) > 0))
230d793d 5292 {
8079805d 5293 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5294
5295 /* If we have (ior (and (X C1) C2)) and the next restart would be
5296 the last, simplify this by making C1 as small as possible
0f41302f 5297 and then exit. */
5298 if (last
5299 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5300 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5301 && GET_CODE (op1) == CONST_INT)
5302 return gen_binary (IOR, mode,
5303 gen_binary (AND, mode, XEXP (op0, 0),
5304 GEN_INT (INTVAL (XEXP (op0, 1))
663522cb 5305 & ~INTVAL (op1))), op1);
5306
5307 if (GET_CODE (x) != AND)
8079805d 5308 return x;
0e32506c 5309
663522cb 5310 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
5311 || GET_RTX_CLASS (GET_CODE (x)) == '2')
5312 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5313 }
5314
5315 /* Convert (A | B) & A to A. */
5316 if (GET_CODE (op0) == IOR
5317 && (rtx_equal_p (XEXP (op0, 0), op1)
5318 || rtx_equal_p (XEXP (op0, 1), op1))
5319 && ! side_effects_p (XEXP (op0, 0))
5320 && ! side_effects_p (XEXP (op0, 1)))
5321 return op1;
230d793d 5322
d0ab8cd3 5323 /* In the following group of tests (and those in case IOR below),
5324 we start with some combination of logical operations and apply
5325 the distributive law followed by the inverse distributive law.
5326 Most of the time, this results in no change. However, if some of
5327 the operands are the same or inverses of each other, simplifications
5328 will result.
5329
5330 For example, (and (ior A B) (not B)) can occur as the result of
5331 expanding a bit field assignment. When we apply the distributive
5332 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
663522cb 5333 which then simplifies to (and (A (not B))).
230d793d 5334
8079805d 5335 If we have (and (ior A B) C), apply the distributive law and then
5336 the inverse distributive law to see if things simplify. */
5337
8079805d 5338 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5339 {
5340 x = apply_distributive_law
5341 (gen_binary (GET_CODE (op0), mode,
5342 gen_binary (AND, mode, XEXP (op0, 0), op1),
5343 gen_binary (AND, mode, XEXP (op0, 1),
5344 copy_rtx (op1))));
230d793d 5345 if (GET_CODE (x) != AND)
8079805d 5346 return x;
5347 }
5348
5349 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5350 return apply_distributive_law
5351 (gen_binary (GET_CODE (op1), mode,
5352 gen_binary (AND, mode, XEXP (op1, 0), op0),
5353 gen_binary (AND, mode, XEXP (op1, 1),
5354 copy_rtx (op0))));
5355
5356 /* Similarly, taking advantage of the fact that
5357 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
5358
5359 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5360 return apply_distributive_law
5361 (gen_binary (XOR, mode,
5362 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5363 gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5364 XEXP (op1, 1))));
663522cb 5365
5366 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5367 return apply_distributive_law
5368 (gen_binary (XOR, mode,
5369 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
3749f4ca 5370 gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
5371 break;
5372
5373 case IOR:
951553af 5374 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
8079805d 5375 if (GET_CODE (op1) == CONST_INT
ac49a949 5376 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
663522cb 5377 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
8079805d 5378 return op1;
d0ab8cd3 5379
230d793d 5380 /* Convert (A & B) | A to A. */
5381 if (GET_CODE (op0) == AND
5382 && (rtx_equal_p (XEXP (op0, 0), op1)
5383 || rtx_equal_p (XEXP (op0, 1), op1))
5384 && ! side_effects_p (XEXP (op0, 0))
5385 && ! side_effects_p (XEXP (op0, 1)))
5386 return op1;
5387
5388 /* If we have (ior (and A B) C), apply the distributive law and then
5389 the inverse distributive law to see if things simplify. */
5390
8079805d 5391 if (GET_CODE (op0) == AND)
5392 {
5393 x = apply_distributive_law
5394 (gen_binary (AND, mode,
8079805d 5395 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5396 gen_binary (IOR, mode, XEXP (op0, 1),
5397 copy_rtx (op1))));
5398
5399 if (GET_CODE (x) != IOR)
8079805d 5400 return x;
5401 }
5402
8079805d 5403 if (GET_CODE (op1) == AND)
5404 {
5405 x = apply_distributive_law
5406 (gen_binary (AND, mode,
8079805d 5407 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5408 gen_binary (IOR, mode, XEXP (op1, 1),
5409 copy_rtx (op0))));
5410
5411 if (GET_CODE (x) != IOR)
8079805d 5412 return x;
5413 }
5414
5415 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5416 mode size to (rotate A CX). */
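 /* Added illustration: in SImode,
    (ior (ashift x 8) (lshiftrt x 24)) -> (rotate x 8), since 8 + 24
    equals the 32-bit mode size.  */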
5417
5418 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5419 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5420 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5421 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5422 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5423 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
230d793d 5424 == GET_MODE_BITSIZE (mode)))
5425 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5426 (GET_CODE (op0) == ASHIFT
5427 ? XEXP (op0, 1) : XEXP (op1, 1)));
230d793d 5428
5429 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5430 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5431 does not affect any of the bits in OP1, it can really be done
5432 as a PLUS and we can associate. We do this by seeing if OP1
5433 can be safely shifted left C bits. */
5434 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5435 && GET_CODE (XEXP (op0, 0)) == PLUS
5436 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5437 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5438 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5439 {
5440 int count = INTVAL (XEXP (op0, 1));
5441 HOST_WIDE_INT mask = INTVAL (op1) << count;
5442
5443 if (mask >> count == INTVAL (op1)
5444 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5445 {
5446 SUBST (XEXP (XEXP (op0, 0), 1),
5447 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5448 return op0;
5449 }
5450 }
5451 break;
5452
5453 case XOR:
5454 /* If we are XORing two things that have no bits in common,
5455 convert them into an IOR. This helps to detect rotation encoded
5456 using those methods and possibly other simplifications. */
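 /* Added illustration: in SImode, (xor (ashift x 8) (lshiftrt x 24))
    has disjoint nonzero bits, so it becomes
    (ior (ashift x 8) (lshiftrt x 24)), which the IOR case above then
    recognizes as (rotate x 8).  */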
5457
5458 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5459 && (nonzero_bits (op0, mode)
5460 & nonzero_bits (op1, mode)) == 0)
5461 return (gen_binary (IOR, mode, op0, op1));
5462
5463 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5464 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5465 (NOT y). */
5466 {
5467 int num_negated = 0;
230d793d 5468
5469 if (GET_CODE (op0) == NOT)
5470 num_negated++, op0 = XEXP (op0, 0);
5471 if (GET_CODE (op1) == NOT)
5472 num_negated++, op1 = XEXP (op1, 0);
5473
5474 if (num_negated == 2)
5475 {
5476 SUBST (XEXP (x, 0), op0);
5477 SUBST (XEXP (x, 1), op1);
5478 }
5479 else if (num_negated == 1)
0c1c8ea6 5480 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
5481 }
5482
5483 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5484 correspond to a machine insn or result in further simplifications
5485 if B is a constant. */
5486
5487 if (GET_CODE (op0) == AND
5488 && rtx_equal_p (XEXP (op0, 1), op1)
5489 && ! side_effects_p (op1))
5490 return gen_binary (AND, mode,
5491 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
8079805d 5492 op1);
230d793d 5493
5494 else if (GET_CODE (op0) == AND
5495 && rtx_equal_p (XEXP (op0, 0), op1)
5496 && ! side_effects_p (op1))
5497 return gen_binary (AND, mode,
5498 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
8079805d 5499 op1);
230d793d 5500
230d793d 5501 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5502 comparison if STORE_FLAG_VALUE is 1. */
5503 if (STORE_FLAG_VALUE == 1
5504 && op1 == const1_rtx
8079805d 5505 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5506 && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5507 XEXP (op0, 1))))
5508 return reversed;
5509
5510 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5511 is (lt foo (const_int 0)), so we can perform the above
0802d516 5512 simplification if STORE_FLAG_VALUE is 1. */
500c518b 5513
5514 if (STORE_FLAG_VALUE == 1
5515 && op1 == const1_rtx
5516 && GET_CODE (op0) == LSHIFTRT
5517 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5518 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5519 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
5520
5521 /* (xor (comparison foo bar) (const_int sign-bit))
5522 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22 5523 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 5524 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
e51712db 5525 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5526 && op1 == const_true_rtx
5527 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5528 && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5529 XEXP (op0, 1))))
5530 return reversed;
0918eca0 5531
230d793d 5532 break;
5533
5534 default:
5535 abort ();
5536 }
5537
5538 return x;
5539}
5540\f
5541/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5542 operations" because they can be replaced with two more basic operations.
5543 ZERO_EXTEND is also considered "compound" because it can be replaced with
5544 an AND operation, which is simpler, though only one operation.
5545
5546 The function expand_compound_operation is called with an rtx expression
663522cb 5547 and will convert it to the appropriate shifts and AND operations,
230d793d
RS
5548 simplifying at each stage.
5549
5550 The function make_compound_operation is called to convert an expression
5551 consisting of shifts and ANDs into the equivalent compound expression.
5552 It is the inverse of this function, loosely speaking. */
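/* Added illustration: (sign_extend:SI (reg:QI r)) expands to
   (ashiftrt:SI (ashift:SI (subreg:SI r) 24) 24), while
   (zero_extend:SI (reg:QI r)) can expand to
   (and:SI (subreg:SI r) (const_int 255)).  */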
5553
5554static rtx
5555expand_compound_operation (x)
5556 rtx x;
5557{
770ae6cc 5558 unsigned HOST_WIDE_INT pos = 0, len;
230d793d 5559 int unsignedp = 0;
770ae6cc 5560 unsigned int modewidth;
5561 rtx tem;
5562
5563 switch (GET_CODE (x))
5564 {
5565 case ZERO_EXTEND:
5566 unsignedp = 1;
5567 case SIGN_EXTEND:
5568 /* We can't necessarily use a const_int for a multiword mode;
5569 it depends on implicitly extending the value.
5570 Since we don't know the right way to extend it,
5571 we can't tell whether the implicit way is right.
5572
5573 Even for a mode that is no wider than a const_int,
5574 we can't win, because we need to sign extend one of its bits through
5575 the rest of it, and we don't know which bit. */
230d793d 5576 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 5577 return x;
230d793d 5578
5579 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5580 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5581 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5582 reloaded. If not for that, MEM's would very rarely be safe.
5583
5584 Reject MODEs bigger than a word, because we might not be able
5585 to reference a two-register group starting with an arbitrary register
5586 (and currently gen_lowpart might crash for a SUBREG). */
663522cb 5587
8079805d 5588 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5589 return x;
5590
5591 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5592 /* If the inner object has VOIDmode (the only way this can happen
5593 is if it is an ASM_OPERANDS), we can't do anything since we don't
5594 know how much masking to do. */
5595 if (len == 0)
5596 return x;
5597
5598 break;
5599
5600 case ZERO_EXTRACT:
5601 unsignedp = 1;
5602 case SIGN_EXTRACT:
5603 /* If the operand is a CLOBBER, just return it. */
5604 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5605 return XEXP (x, 0);
5606
5607 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5608 || GET_CODE (XEXP (x, 2)) != CONST_INT
5609 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5610 return x;
5611
5612 len = INTVAL (XEXP (x, 1));
5613 pos = INTVAL (XEXP (x, 2));
5614
5615 /* If this goes outside the object being extracted, replace the object
5616 with a (use (mem ...)) construct that only combine understands
5617 and is used only for this purpose. */
5618 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
38a448ca 5619 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
230d793d 5620
5621 if (BITS_BIG_ENDIAN)
5622 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5623
5624 break;
5625
5626 default:
5627 return x;
5628 }
5629 /* Convert sign extension to zero extension, if we know that the high
5630 bit is not set, as this is easier to optimize. It will be converted
5631 back to cheaper alternative in make_extraction. */
5632 if (GET_CODE (x) == SIGN_EXTEND
5633 && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5634 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
663522cb 5635 & ~(((unsigned HOST_WIDE_INT)
5636 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5637 >> 1))
5638 == 0)))
5639 {
5640 rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5641 return expand_compound_operation (temp);
5642 }
230d793d 5643
5644 /* We can optimize some special cases of ZERO_EXTEND. */
5645 if (GET_CODE (x) == ZERO_EXTEND)
5646 {
5647 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5648 know that the last value didn't have any inappropriate bits
5649 set. */
5650 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5651 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5652 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5653 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
663522cb 5654 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5655 return XEXP (XEXP (x, 0), 0);
5656
5657 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5658 if (GET_CODE (XEXP (x, 0)) == SUBREG
5659 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5660 && subreg_lowpart_p (XEXP (x, 0))
5661 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5662 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
663522cb 5663 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5664 return SUBREG_REG (XEXP (x, 0));
5665
5666 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5667 is a comparison and STORE_FLAG_VALUE permits. This is like
5668 the first case, but it works even when GET_MODE (x) is larger
5669 than HOST_WIDE_INT. */
5670 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5671 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5672 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5673 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5674 <= HOST_BITS_PER_WIDE_INT)
5675 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
663522cb 5676 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5677 return XEXP (XEXP (x, 0), 0);
5678
5679 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5680 if (GET_CODE (XEXP (x, 0)) == SUBREG
5681 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5682 && subreg_lowpart_p (XEXP (x, 0))
5683 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5684 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5685 <= HOST_BITS_PER_WIDE_INT)
5686 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
663522cb 5687 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5688 return SUBREG_REG (XEXP (x, 0));
5689
5690 }
5691
5692 /* If we reach here, we want to return a pair of shifts. The inner
5693 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5694 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5695 logical depending on the value of UNSIGNEDP.
5696
5697 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5698 converted into an AND of a shift.
5699
5700 We must check for the case where the left shift would have a negative
5701 count. This can happen in a case like (x >> 31) & 255 on machines
5702 that can't shift by a constant. On those machines, we would first
663522cb 5703 combine the shift with the AND to produce a variable-position
230d793d
RS
5704 extraction. Then the constant of 31 would be substituted in to produce
5705 a such a position. */
5706
5707 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
770ae6cc 5708 if (modewidth + len >= pos)
5f4f0e22 5709 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 5710 GET_MODE (x),
5f4f0e22
CH
5711 simplify_shift_const (NULL_RTX, ASHIFT,
5712 GET_MODE (x),
230d793d
RS
5713 XEXP (x, 0),
5714 modewidth - pos - len),
5715 modewidth - len);
5716
5f4f0e22
CH
5717 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5718 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5719 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
5720 GET_MODE (x),
5721 XEXP (x, 0), pos),
5f4f0e22 5722 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5723 else
5724 /* Any other cases we can't handle. */
5725 return x;
230d793d
RS
5726
5727 /* If we couldn't do this for some reason, return the original
5728 expression. */
5729 if (GET_CODE (tem) == CLOBBER)
5730 return x;
5731
5732 return tem;
5733}
5734\f
5735/* X is a SET which contains an assignment of one object into
5736 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5737 or certain SUBREGS). If possible, convert it into a series of
5738 logical operations.
5739
5740 We half-heartedly support variable positions, but do not at all
5741 support variable lengths. */
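/* Added illustration: (set (zero_extract:SI X (const_int 8) (const_int 4)) Y)
   expands to the mask-and-merge form
   (set X (ior (and X (not (ashift 255 4)))
               (ashift (and Y 255) 4))).  */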
5742
5743static rtx
5744expand_field_assignment (x)
5745 rtx x;
5746{
5747 rtx inner;
0f41302f 5748 rtx pos; /* Always counts from low bit. */
5749 int len;
5750 rtx mask;
5751 enum machine_mode compute_mode;
5752
5753 /* Loop until we find something we can't simplify. */
5754 while (1)
5755 {
5756 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5757 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5758 {
5759 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5760 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4d9cfc7b 5761 pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
5762 }
5763 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5764 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5765 {
5766 inner = XEXP (SET_DEST (x), 0);
5767 len = INTVAL (XEXP (SET_DEST (x), 1));
5768 pos = XEXP (SET_DEST (x), 2);
5769
5770 /* If the position is constant and spans the width of INNER,
5771 surround INNER with a USE to indicate this. */
5772 if (GET_CODE (pos) == CONST_INT
5773 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
38a448ca 5774 inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
230d793d 5775
5776 if (BITS_BIG_ENDIAN)
5777 {
5778 if (GET_CODE (pos) == CONST_INT)
5779 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5780 - INTVAL (pos));
5781 else if (GET_CODE (pos) == MINUS
5782 && GET_CODE (XEXP (pos, 1)) == CONST_INT
5783 && (INTVAL (XEXP (pos, 1))
5784 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5785 /* If position is ADJUST - X, new position is X. */
5786 pos = XEXP (pos, 0);
5787 else
5788 pos = gen_binary (MINUS, GET_MODE (pos),
5789 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5790 - len),
5791 pos);
5792 }
5793 }
5794
5795 /* A SUBREG between two modes that occupy the same numbers of words
5796 can be done by moving the SUBREG to the source. */
5797 else if (GET_CODE (SET_DEST (x)) == SUBREG
5798 /* We need SUBREGs to compute nonzero_bits properly. */
5799 && nonzero_sign_valid
230d793d
RS
5800 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5801 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5802 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5803 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5804 {
38a448ca 5805 x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5806 gen_lowpart_for_combine
5807 (GET_MODE (SUBREG_REG (SET_DEST (x))),
5808 SET_SRC (x)));
5809 continue;
5810 }
5811 else
5812 break;
5813
5814 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5815 inner = SUBREG_REG (inner);
5816
5817 compute_mode = GET_MODE (inner);
5818
5819 /* Don't attempt bitwise arithmetic on non-integral modes. */
5820 if (! INTEGRAL_MODE_P (compute_mode))
5821 {
5822 enum machine_mode imode;
5823
5824 /* Something is probably seriously wrong if this matches. */
5825 if (! FLOAT_MODE_P (compute_mode))
5826 break;
5827
5828 /* Try to find an integral mode to pun with. */
5829 imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5830 if (imode == BLKmode)
5831 break;
5832
5833 compute_mode = imode;
5834 inner = gen_lowpart_for_combine (imode, inner);
5835 }
5836
230d793d 5837 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5838 if (len < HOST_BITS_PER_WIDE_INT)
5839 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5840 else
5841 break;
5842
5843 /* Now compute the equivalent expression. Make a copy of INNER
5844 for the SET_DEST in case it is a MEM into which we will substitute;
5845 we don't want shared RTL in that case. */
c5c76735
JL
5846 x = gen_rtx_SET
5847 (VOIDmode, copy_rtx (inner),
5848 gen_binary (IOR, compute_mode,
5849 gen_binary (AND, compute_mode,
5850 gen_unary (NOT, compute_mode,
5851 compute_mode,
5852 gen_binary (ASHIFT,
5853 compute_mode,
5854 mask, pos)),
5855 inner),
5856 gen_binary (ASHIFT, compute_mode,
5857 gen_binary (AND, compute_mode,
5858 gen_lowpart_for_combine
5859 (compute_mode, SET_SRC (x)),
5860 mask),
5861 pos)));
230d793d
RS
5862 }
5863
5864 return x;
5865}
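As an aside, the IOR/AND/ASHIFT tree built above encodes the usual
read-modify-write identity for a bitfield store.  A minimal standalone
sketch (not part of combine.c; the helper name store_field is invented
for this illustration):

#include <stdio.h>

/* Hypothetical helper: store the LEN low-order bits of SRC into the
   field of INNER starting at bit POS, leaving all other bits alone.
   This is exactly (inner & ~(mask << pos)) | ((src & mask) << pos).  */
static unsigned int
store_field (unsigned int inner, unsigned int src, int pos, int len)
{
  unsigned int mask = (1u << len) - 1;	/* LEN low-order one bits */
  return (inner & ~(mask << pos))	/* clear the old field */
	 | ((src & mask) << pos);	/* shift the new value into place */
}

int
main (void)
{
  /* Store the 4-bit value 0xA at bit position 8 of all-ones.  */
  printf ("%#x\n", store_field (0xffffffffu, 0xa, 8, 4));  /* 0xfffffaff */
  return 0;
}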
\f
/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   INNER may be a USE.  This will occur when we started with a bitfield
   that went outside the boundary of the object in memory, which is
   allowed on most machines.  To isolate this case, we produce a USE
   whose mode is wide enough and surround the MEM with it.  The only
   code that understands the USE is this routine.  If it is not removed,
   it will cause the resulting insn not to match.

   UNSIGNEDP is non-zero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is non-zero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If non-zero,
   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is non-zero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).

   The result is an RTX for the extraction or NULL_RTX if the target
   can't handle it.  */

static rtx
make_extraction (mode, inner, pos, pos_rtx, len,
		 unsignedp, in_dest, in_compare)
     enum machine_mode mode;
     rtx inner;
     HOST_WIDE_INT pos;
     rtx pos_rtx;
     unsigned HOST_WIDE_INT len;
     int unsignedp;
     int in_dest, in_compare;
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode = byte_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  int spans_byte = 0;
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;
  HOST_WIDE_INT orig_pos;

  /* Get some information about INNER and get the innermost object.  */
  if (GET_CODE (inner) == USE)
    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
    /* We don't need to adjust the position because we set up the USE
       to pretend that it was a full-word object.  */
    spans_byte = 1, inner = XEXP (inner, 0);
  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
	 consider just the QI as the memory to extract from.
	 The subreg adds or removes high bits; its mode is
	 irrelevant to the meaning of this extraction,
	 since POS and LEN count from the lsb.  */
      if (GET_CODE (SUBREG_REG (inner)) == MEM)
	is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode.  For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  However,
     we cannot directly access the MEM if we have a USE and the underlying
     MEM is not TMODE.  This combination means that MEM was being used in a
     context where bits outside its mode were being referenced; that is only
     valid in bit-field insns.  */

  if (tmode != BLKmode
      && ! (spans_byte && inner_mode != tmode)
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
	   && GET_CODE (inner) != MEM
	   && (! in_dest
	       || (GET_CODE (inner) == REG
		   && (movstrict_optab->handlers[(int) tmode].insn_code
		       != CODE_FOR_nothing))))
	  || (GET_CODE (inner) == MEM && pos_rtx == 0
	      && (pos
		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
		     : BITS_PER_UNIT)) == 0
	      /* We can't do this if we are widening INNER_MODE (it
		 may not be aligned, for one thing).  */
	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
	      && (inner_mode == tmode
		  || (! mode_dependent_address_p (XEXP (inner, 0))
		      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
	 field.  If the original and current mode are the same, we need not
	 adjust the offset.  Otherwise, we do if bytes big endian.

	 If INNER is not a MEM, get a piece consisting of just the field
	 of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (GET_CODE (inner) == MEM)
	{
	  int offset;
	  /* POS counts from lsb, but make OFFSET count in memory order.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
	  else
	    offset = pos / BITS_PER_UNIT;

	  new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
	  MEM_COPY_ATTRIBUTES (new, inner);
	}
      else if (GET_CODE (inner) == REG)
	{
	  /* We can't call gen_lowpart_for_combine here since we always want
	     a SUBREG and it would sometimes return a new hard register.  */
	  if (tmode != inner_mode)
	    new = gen_rtx_SUBREG (tmode, inner,
				  (WORDS_BIG_ENDIAN
				   && (GET_MODE_SIZE (inner_mode)
				       > UNITS_PER_WORD)
				   ? (((GET_MODE_SIZE (inner_mode)
					- GET_MODE_SIZE (tmode))
				       / UNITS_PER_WORD)
				      - pos / BITS_PER_WORD)
				   : pos / BITS_PER_WORD));
	  else
	    new = inner;
	}
      else
	new = force_to_mode (inner, tmode,
			     len >= HOST_BITS_PER_WIDE_INT
			     ? ~(unsigned HOST_WIDE_INT) 0
			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
			     NULL_RTX, 0);

      /* If this extraction is going into the destination of a SET,
	 make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
	return (GET_CODE (new) == MEM ? new
		: (GET_CODE (new) != SUBREG
		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
		   : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));

      if (mode == tmode)
	return new;

      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert the extraction to the cheaper of
	 sign and zero extension, which are equivalent in these cases.  */
      if (flag_expensive_optimizations
	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
	      && ((nonzero_bits (new, tmode)
		   & ~(((unsigned HOST_WIDE_INT)
			GET_MODE_MASK (tmode))
		       >> 1))
		  == 0)))
	{
	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);

	  /* Prefer ZERO_EXTENSION, since it gives more information to
	     backends.  */
	  if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
	    return temp;
	  return temp1;
	}

      /* Otherwise, sign- or zero-extend unless we already are in the
	 proper mode.  */

      return (gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
			       mode, new));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && ! spans_byte && unsignedp)
    return 0;

  /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
     we would be spanning bytes or if the position is not a constant and the
     length is not 1.  In all other cases, we would only be going outside
     our object in cases when an original shift would have been
     undefined.  */
  if (! spans_byte && GET_CODE (inner) == MEM
      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
	  || (pos_rtx != 0 && len != 1)))
    return 0;

  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  */
#ifdef HAVE_insv
  if (in_dest)
    {
      wanted_inner_reg_mode
	= insn_data[(int) CODE_FOR_insv].operand[0].mode;
      if (wanted_inner_reg_mode == VOIDmode)
	wanted_inner_reg_mode = word_mode;

      pos_mode = insn_data[(int) CODE_FOR_insv].operand[2].mode;
      if (pos_mode == VOIDmode)
	pos_mode = word_mode;

      extraction_mode = insn_data[(int) CODE_FOR_insv].operand[3].mode;
      if (extraction_mode == VOIDmode)
	extraction_mode = word_mode;
    }
#endif

#ifdef HAVE_extzv
  if (! in_dest && unsignedp)
    {
      wanted_inner_reg_mode
	= insn_data[(int) CODE_FOR_extzv].operand[1].mode;
      if (wanted_inner_reg_mode == VOIDmode)
	wanted_inner_reg_mode = word_mode;

      pos_mode = insn_data[(int) CODE_FOR_extzv].operand[3].mode;
      if (pos_mode == VOIDmode)
	pos_mode = word_mode;

      extraction_mode = insn_data[(int) CODE_FOR_extzv].operand[0].mode;
      if (extraction_mode == VOIDmode)
	extraction_mode = word_mode;
    }
#endif

#ifdef HAVE_extv
  if (! in_dest && ! unsignedp)
    {
      wanted_inner_reg_mode
	= insn_data[(int) CODE_FOR_extv].operand[1].mode;
      if (wanted_inner_reg_mode == VOIDmode)
	wanted_inner_reg_mode = word_mode;

      pos_mode = insn_data[(int) CODE_FOR_extv].operand[3].mode;
      if (pos_mode == VOIDmode)
	pos_mode = word_mode;

      extraction_mode = insn_data[(int) CODE_FOR_extv].operand[0].mode;
      if (extraction_mode == VOIDmode)
	extraction_mode = word_mode;
    }
#endif

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
     if we have to change the mode of memory and cannot, the desired mode is
     EXTRACTION_MODE.  */
  if (GET_CODE (inner) != MEM)
    wanted_inner_mode = wanted_inner_reg_mode;
  else if (inner_mode != wanted_inner_mode
	   && (mode_dependent_address_p (XEXP (inner, 0))
	       || MEM_VOLATILE_P (inner)))
    wanted_inner_mode = extraction_mode;

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
	 BITS_BIG_ENDIAN style.  If position is constant, compute new
	 position.  Otherwise, build subtraction.
	 Note that POS is relative to the mode of the original argument.
	 If it's a MEM we need to recompute POS relative to that.
	 However, if we're extracting from (or inserting into) a register,
	 we want to recompute POS relative to wanted_inner_mode.  */
      int width = (GET_CODE (inner) == MEM
		   ? GET_MODE_BITSIZE (is_mode)
		   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
	pos = width - len - pos;
      else
	pos_rtx
	  = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
			     GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
    }

  /* If INNER has a wider mode, make it smaller.  If this is a constant
     extract, try to adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && ((GET_CODE (inner) == MEM
	   && (inner_mode == wanted_inner_mode
	       || (! mode_dependent_address_p (XEXP (inner, 0))
		   && ! MEM_VOLATILE_P (inner))))))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
	 endian in both bits and bytes or little endian in bits and bytes.
	 If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
	 adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
	  && ! spans_byte
	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* If this is a constant position, we can move to the desired byte.  */
      if (pos_rtx == 0)
	{
	  offset += pos / BITS_PER_UNIT;
	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
	}

      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
	  && ! spans_byte
	  && is_mode != wanted_inner_mode)
	offset = (GET_MODE_SIZE (is_mode)
		  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      if (offset != 0 || inner_mode != wanted_inner_mode)
	{
	  rtx newmem = gen_rtx_MEM (wanted_inner_mode,
				    plus_constant (XEXP (inner, 0), offset));

	  MEM_COPY_ATTRIBUTES (newmem, inner);
	  inner = newmem;
	}
    }

  /* If INNER is not memory, we can always get it into the proper mode.  If we
     are changing its mode, POS must be a constant and smaller than the size
     of the new mode.  */
  else if (GET_CODE (inner) != MEM)
    {
      if (GET_MODE (inner) != wanted_inner_mode
	  && (pos_rtx != 0
	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
	return 0;

      inner = force_to_mode (inner, wanted_inner_mode,
			     pos_rtx
			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
			     ? ~(unsigned HOST_WIDE_INT) 0
			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
				<< orig_pos),
			     NULL_RTX, 0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    {
      rtx temp = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);

      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert the extraction to the cheaper of
	 SIGN_EXTENSION and ZERO_EXTENSION, which are equivalent in these
	 cases.  */
      if (flag_expensive_optimizations
	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
		   & ~(((unsigned HOST_WIDE_INT)
			GET_MODE_MASK (GET_MODE (pos_rtx)))
		       >> 1))
		  == 0)))
	{
	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);

	  /* Prefer ZERO_EXTENSION, since it gives more information to
	     backends.  */
	  if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
	    temp = temp1;
	}
      pos_rtx = temp;
    }
  else if (pos_rtx != 0
	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
			 extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart_for_combine (mode, new);

  return new;
}
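The BITS_BIG_ENDIAN conversion above is just an involutive position
flip.  A minimal standalone sketch (not part of combine.c; flip_bit_pos
is an invented name):

#include <stdio.h>

/* A LEN-bit field that starts POS bits from the LSB starts
   WIDTH - LEN - POS bits from the MSB, and vice versa.  */
static int
flip_bit_pos (int width, int pos, int len)
{
  return width - len - pos;
}

int
main (void)
{
  /* An 8-bit field at LSB-relative position 0 of a 32-bit word is at
     MSB-relative position 24; flipping twice is the identity.  */
  printf ("%d\n", flip_bit_pos (32, 0, 8));			  /* 24 */
  printf ("%d\n", flip_bit_pos (32, flip_bit_pos (32, 0, 8), 8)); /* 0 */
  return 0;
}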
\f
/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
   with any other operations in X.  Return X without that shift if so.  */

static rtx
extract_left_shift (x, count)
     rtx x;
     int count;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
	 either the value being shifted if the shift count is equal to
	 COUNT or a shift for the difference.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= count)
	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
				     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
	return gen_unary (code, mode, mode, tem);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
	 make a new operation.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
	return gen_binary (code, mode, tem,
			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;

    default:
      break;
    }

  return 0;
}
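The commutation this routine relies on is easy to check in ordinary
arithmetic: when the low COUNT bits of a constant K are zero,
(a << c) op K equals (a op (K >> c)) << c for PLUS/IOR/XOR/AND.  A
minimal standalone sketch (not part of combine.c):

#include <stdio.h>

int
main (void)
{
  unsigned int a = 123, c = 3, k = 40;	/* 40 has its low 3 bits clear */

  /* Since the low C bits of K are zero, the shift commutes with the
     addition: (a << c) + k == (a + (k >> c)) << c.  */
  printf ("%u %u\n", (a << c) + k, (a + (k >> c)) << c);  /* both 1024 */
  return 0;
}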
\f
/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the Vax that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */

static rtx
make_compound_operation (x, in_code)
     rtx x;
     enum rtx_code in_code;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i;
  rtx new = 0;
  rtx tem;
  const char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
		  && XEXP (x, 1) == const0_rtx) ? COMPARE
	       : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     non-zero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
	 an address.  */
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (x, 0), next_code);
	  new = gen_rtx_combine (MULT, mode, new,
				 GEN_INT ((HOST_WIDE_INT) 1
					  << INTVAL (XEXP (x, 1))));
	}
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
	 with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	break;

      /* If the constant is a power of two minus one and the first operand
	 is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
				 0, in_code == COMPARE);
	}

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (x, 0))
	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
					 next_code);
	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
				 0, in_code == COMPARE);
	}
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
		|| GET_CODE (XEXP (x, 0)) == IOR)
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  /* Apply the distributive law, and then try to make extractions.  */
	  new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
				 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
					      XEXP (x, 1)),
				 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
					      XEXP (x, 1)));
	  new = make_compound_operation (new, in_code);
	}

      /* If we have (and (rotate X C) M) and C is larger than the number
	 of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new,
				 (GET_MODE_BITSIZE (mode)
				  - INTVAL (XEXP (XEXP (x, 0), 1))),
				 NULL_RTX, i, 1, 0, in_code == COMPARE);
	}

      /* On machines without logical shifts, if the operand of the AND is
	 a logical shift and our mask turns off all the propagated sign
	 bits, we can replace the logical shift with an arithmetic shift.  */
      else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	       && (lshr_optab->handlers[(int) mode].insn_code
		   == CODE_FOR_nothing)
	       && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	       && mode_width <= HOST_BITS_PER_WIDE_INT)
	{
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
	    SUBST (XEXP (x, 0),
		   gen_rtx_combine (ASHIFTRT, mode,
				    make_compound_operation (XEXP (XEXP (x, 0), 0),
							     next_code),
				    XEXP (XEXP (x, 0), 1)));
	}

      /* If the constant is one less than a power of two, this might be
	 representable by an extraction even if no shift is present.
	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
	 we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
	 convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
	 arithmetic shift.  */
      if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
	  && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
	{
	  new = gen_rtx_combine (ASHIFTRT, mode,
				 make_compound_operation (XEXP (x, 0),
							  next_code),
				 XEXP (x, 1));
	  break;
	}

      /* ... fall through ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
	 this is a SIGN_EXTRACT.  */
      if (GET_CODE (rhs) == CONST_INT
	  && GET_CODE (lhs) == ASHIFT
	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
	{
	  new = make_compound_operation (XEXP (lhs, 0), next_code);
	  new = make_extraction (mode, new,
				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
				 NULL_RTX, mode_width - INTVAL (rhs),
				 code == LSHIFTRT, 0, in_code == COMPARE);
	  break;
	}

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
	 also do this for some cases of SIGN_EXTRACT, but it doesn't
	 seem worth the effort; the case checked for occurs on Alpha.  */

      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
	  && ! (GET_CODE (lhs) == SUBREG
		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
	  && GET_CODE (rhs) == CONST_INT
	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
	new = make_extraction (mode, make_compound_operation (new, next_code),
			       0, NULL_RTX, mode_width - INTVAL (rhs),
			       code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
	 narrowing the object and it has a different RTL code from
	 what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);
      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
	  && subreg_lowpart_p (x))
	{
	  rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
				     NULL_RTX, 0);

	  /* If we have something other than a SUBREG, we might have
	     done an expansion, so rerun ourselves.  */
	  if (GET_CODE (newer) != SUBREG)
	    newer = make_compound_operation (newer, in_code);

	  return newer;
	}

      /* If this is a paradoxical subreg, and the new code is a sign or
	 zero extension, omit the subreg and widen the extension.  If it
	 is a regular subreg, we can still get rid of the subreg by not
	 widening so much, or in fact removing the extension entirely.  */
      if ((GET_CODE (tem) == SIGN_EXTEND
	   || GET_CODE (tem) == ZERO_EXTEND)
	  && subreg_lowpart_p (x))
	{
	  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
	      || (GET_MODE_SIZE (mode) >
		  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
	    tem = gen_rtx_combine (GET_CODE (tem), mode, XEXP (tem, 0));
	  else
	    tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
	  return tem;
	}
      break;

    default:
      break;
    }

  if (new)
    {
      x = gen_lowpart_for_combine (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = make_compound_operation (XEXP (x, i), next_code);
	SUBST (XEXP (x, i), new);
      }

  return x;
}
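The AND-of-LSHIFTRT case above turns a shift-and-mask pair back into a
single field extraction.  A minimal standalone sketch of the underlying
arithmetic (not part of combine.c; zero_extract_bits is an invented
name):

#include <stdio.h>

/* (x >> pos) & ((1 << len) - 1) is precisely a zero_extract of LEN
   bits at position POS, which is what make_extraction builds.  */
static unsigned int
zero_extract_bits (unsigned int x, int pos, int len)
{
  return (x >> pos) & ((1u << len) - 1);
}

int
main (void)
{
  /* Pull the byte occupying bits 8..15 out of 0x12345678.  */
  printf ("%#x\n", zero_extract_bits (0x12345678u, 8, 8));  /* 0x56 */
  return 0;
}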
\f
/* Given M, see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */

static int
get_pos_from_mask (m, plen)
     unsigned HOST_WIDE_INT m;
     unsigned HOST_WIDE_INT *plen;
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & -m);
  int len;

  if (pos < 0)
    return -1;

  /* Now shift off the low-order zero bits and see if we have a power of
     two minus 1.  */
  len = exact_log2 ((m >> pos) + 1);

  if (len <= 0)
    return -1;

  *plen = len;
  return pos;
}
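The test can be restated without exact_log2: after shifting off the
trailing zeros, M selects a field only if M + 1 is a power of two,
i.e. the remaining bits are contiguous ones.  A minimal standalone
sketch (not part of combine.c; pos_from_mask is an invented name):

#include <stdio.h>

static int
pos_from_mask (unsigned int m, int *plen)
{
  int pos = 0, len = 0;
  unsigned int t;

  if (m == 0)
    return -1;
  while ((m & 1) == 0)		/* exact_log2 (m & -m): trailing zeros */
    m >>= 1, pos++;
  t = m + 1;			/* contiguous ones => power of two */
  if ((t & (t - 1)) != 0)
    return -1;
  while (m != 0)		/* field length = number of one bits */
    m >>= 1, len++;
  *plen = len;
  return pos;
}

int
main (void)
{
  int len;
  printf ("%d\n", pos_from_mask (0xf0, &len));	/* 4, with len == 4 */
  printf ("%d\n", pos_from_mask (0xb, &len));	/* -1: bits not contiguous */
  return 0;
}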
\f
/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   Also, if REG is non-zero and X is a register equal in value to REG,
   replace X with REG.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */

static rtx
force_to_mode (x, mode, mask, reg, just_select)
     rtx x;
     enum machine_mode mode;
     unsigned HOST_WIDE_INT mask;
     rtx reg;
     int just_select;
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart_for_combine.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
	      && code_to_optab[(int) code] != 0
	      && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
		  != CODE_FOR_nothing))
	     ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
  if (op_mode)
    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
		   ? GET_MODE_MASK (op_mode)
		   : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
		      - 1));
  else
    fuller_mask = ~(HOST_WIDE_INT) 0;

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (! just_select && (nonzero & mask) == 0)
    return const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (GET_CODE (x) == CONST_INT)
    {
      HOST_WIDE_INT cval = INTVAL (x) & mask;
      int width = GET_MODE_BITSIZE (mode);

      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
	 number, sign extend it.  */
      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
	  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
	cval |= (HOST_WIDE_INT) -1 << width;

      return GEN_INT (cval);
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
    return gen_lowpart_for_combine (mode, x);

  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
     MASK are already known to be zero in X, we need not do anything.  */
  if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
    return x;

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
	 generating something that won't match.  */
      return x;

    case USE:
      /* X is a (use (mem ..)) that was made from a bit-field extraction that
	 spanned the boundary of the MEM.  If we are now masking so it is
	 within that boundary, we don't need the USE any more.  */
      if (! BITS_BIG_ENDIAN
	  && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
	return force_to_mode (x, mode, mask, reg, next_select);
      break;

    case REG:
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
		       || rtx_equal_p (reg, get_last_value (x))))
	x = reg;
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
	  /* We can ignore the effect of this SUBREG if it narrows the mode or
	     if the constant masks to zero all the bits the mode doesn't
	     have.  */
	  && ((GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	      || (0 == (mask
			& GET_MODE_MASK (GET_MODE (x))
			& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
	 whose constant is the AND of that constant with MASK.  If it
	 remains an AND of MASK, delete it since it is redundant.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
				      mask & INTVAL (XEXP (x, 1)));

	  /* If X is still an AND, see if it is an AND with a mask that
	     is just some low-order bits.  If so, and it is MASK, we don't
	     need it.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask)
	    x = XEXP (x, 0);

	  /* If it remains an AND, try making another AND with the bits
	     in the mode mask that aren't in MASK turned on.  If the
	     constant in the AND is wide enough, this might make a
	     cheaper constant.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_MODE_MASK (GET_MODE (x)) != mask
	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
	    {
	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
				    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
	      int width = GET_MODE_BITSIZE (GET_MODE (x));
	      rtx y;

	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
		 number, sign extend it.  */
	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
		cval |= (HOST_WIDE_INT) -1 << width;

	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
		x = y;
	    }

	  break;
	}

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
	 low-order bits (as in an alignment operation) and FOO is already
	 aligned to that boundary, mask C1 to that boundary as well.
	 This may eliminate that PLUS and, later, the AND.  */

      {
	unsigned int width = GET_MODE_BITSIZE (mode);
	unsigned HOST_WIDE_INT smask = mask;

	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
	   number, sign extend it.  */

	if (width < HOST_BITS_PER_WIDE_INT
	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
	  smask |= (HOST_WIDE_INT) -1 << width;

	if (GET_CODE (XEXP (x, 1)) == CONST_INT
	    && exact_log2 (- smask) >= 0)
	  {
#ifdef STACK_BIAS
	    if (STACK_BIAS
		&& (XEXP (x, 0) == stack_pointer_rtx
		    || XEXP (x, 0) == frame_pointer_rtx))
	      {
		int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
		unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);

		sp_mask &= ~(sp_alignment - 1);
		if ((sp_mask & ~smask) == 0
		    && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~smask) != 0)
		  return force_to_mode (plus_constant (XEXP (x, 0),
						       ((INTVAL (XEXP (x, 1)) -
							 STACK_BIAS) & smask)
						       + STACK_BIAS),
					mode, smask, reg, next_select);
	      }
#endif
	    if ((nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
		&& (INTVAL (XEXP (x, 1)) & ~smask) != 0)
	      return force_to_mode (plus_constant (XEXP (x, 0),
						   (INTVAL (XEXP (x, 1))
						    & smask)),
				    mode, smask, reg, next_select);
	  }
      }

      /* ... fall through ...  */

    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
	 most significant bit in MASK since carries from those bits will
	 affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case MINUS:
      /* If X is (minus C Y) where C's least set bit is larger than any bit
	 in the mask, then we may replace with (neg Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
					 & -INTVAL (XEXP (x, 0))))
	      > mask))
	{
	  x = gen_unary (NEG, GET_MODE (x), GET_MODE (x), XEXP (x, 1));
	  return force_to_mode (x, mode, mask, reg, next_select);
	}

      /* Similarly, if C contains every bit in the mask, then we may
	 replace with (not Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) mask)
	      == INTVAL (XEXP (x, 0))))
	{
	  x = gen_unary (NOT, GET_MODE (x), GET_MODE (x), XEXP (x, 1));
	  return force_to_mode (x, mode, mask, reg, next_select);
	}

      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
	 operation which may be a bitfield extraction.  Ensure that the
	 constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
	       + floor_log2 (INTVAL (XEXP (x, 1))))
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && (INTVAL (XEXP (x, 1))
	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
	{
	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
			  << INTVAL (XEXP (XEXP (x, 0), 1)));
	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
			     XEXP (XEXP (x, 0), 0), temp);
	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
			  XEXP (XEXP (x, 0), 1));
	  return force_to_mode (x, mode, mask, reg, next_select);
	}

    binop:
      /* For most binary operations, just propagate into the operation and
	 change the mode if we have an operation of that mode.  */

      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    reg, next_select));
      op1 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 1), mode, mask,
						    reg, next_select));

      /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
	 MASK since OP1 might have been sign-extended but we never want
	 to turn on extra bits, since combine might have previously relied
	 on them being off.  */
      if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
	  && (INTVAL (op1) & mask) != 0)
	op1 = GEN_INT (INTVAL (op1) & mask);

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
	 However, we cannot do anything with shifts where we cannot
	 guarantee that the counts are smaller than the size of the mode
	 because such a count will have a different meaning in a
	 wider mode.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
	     && INTVAL (XEXP (x, 1)) >= 0
	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
	break;

      /* If the shift count is a constant and we can do arithmetic in
	 the mode of the shift, refine which bits we need.  Otherwise, use the
	 conservative form of the mask.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	mask >>= INTVAL (XEXP (x, 1));
      else
	mask = fuller_mask;

      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), op_mode,
						    mask, reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
	 this shift constant is valid for the host, and we can do arithmetic
	 in OP_MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  rtx inner = XEXP (x, 0);
	  unsigned HOST_WIDE_INT inner_mask;

	  /* Select the mask of the bits we need for the shift operand.  */
	  inner_mask = mask << INTVAL (XEXP (x, 1));

	  /* We can only change the mode of the shift if we can do arithmetic
	     in the mode of the shift and INNER_MASK is no wider than the
	     width of OP_MODE.  */
	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
	      || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
	    op_mode = GET_MODE (x);

	  inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);

	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
	}

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
	 shift and AND produces only copies of the sign bit (C2 is one less
	 than a power of two), we can do this with just a shift.  */

      if (GET_CODE (x) == LSHIFTRT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  /* The shift puts one of the sign bit copies in the least significant
	     bit.  */
	  && ((INTVAL (XEXP (x, 1))
	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
	      >= GET_MODE_BITSIZE (GET_MODE (x)))
	  && exact_log2 (mask + 1) >= 0
	  /* Number of bits left after the shift must be more than the mask
	     needs.  */
	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
	      <= GET_MODE_BITSIZE (GET_MODE (x)))
	  /* Must be more sign bit copies than the mask needs.  */
	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
	      >= exact_log2 (mask + 1)))
	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
				 - exact_log2 (mask + 1)));

      goto shiftrt;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
	 all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (mask == ((unsigned HOST_WIDE_INT) 1
		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      /* If this is a shift by a constant, get a mask that contains those bits
	 that are not copies of the sign bit.  We then have two cases:  If
	 MASK only includes those bits, this can be a logical shift, which may
	 allow simplifications.  If MASK is a single-bit field not within
	 those bits, we are requesting a copy of the sign bit and hence can
	 shift the sign bit to the appropriate location.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  int i = -1;

	  /* If the considered data is wider than HOST_WIDE_INT, we can't
	     represent a mask for all its bits in a single scalar.
	     But we only care about the lower bits, so calculate these.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
	    {
	      nonzero = ~(HOST_WIDE_INT) 0;

	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		 is the number of bits a full-width mask would have set.
		 We need only shift if these are fewer than nonzero can
		 hold.  If not, we must keep all bits set in nonzero.  */

	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		  < HOST_BITS_PER_WIDE_INT)
		nonzero >>= INTVAL (XEXP (x, 1))
			    + HOST_BITS_PER_WIDE_INT
			    - GET_MODE_BITSIZE (GET_MODE (x));
	    }
	  else
	    {
	      nonzero = GET_MODE_MASK (GET_MODE (x));
	      nonzero >>= INTVAL (XEXP (x, 1));
	    }

	  if ((mask & ~nonzero) == 0
	      || (i = exact_log2 (mask)) >= 0)
	    {
	      x = simplify_shift_const
		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
		 i < 0 ? INTVAL (XEXP (x, 1))
		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

	      if (GET_CODE (x) != ASHIFTRT)
		return force_to_mode (x, mode, mask, reg, next_select);
	    }
	}

      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
	 even if the shift count isn't a constant.  */
      if (mask == 1)
	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));

    shiftrt:

      /* If this is a zero- or sign-extension operation that just affects bits
	 we don't care about, remove it.  Be sure the call above returned
	 something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && (INTVAL (XEXP (x, 1))
	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
	  && GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
			      reg, next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
	 in the mode of X, compute where the bits we care about are.
	 Otherwise, we can't do anything.  Don't change the mode of
	 the shift or propagate MODE into the shift, though.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
					    GET_MODE (x), GEN_INT (mask),
					    XEXP (x, 1));
	  if (temp && GET_CODE (temp) == CONST_INT)
	    SUBST (XEXP (x, 0),
		   force_to_mode (XEXP (x, 0), GET_MODE (x),
				  INTVAL (temp), reg, next_select));
	}
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
	 won't change the low-order bit.  */
      if (mask == 1)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);

      /* We need any bits less significant than the most significant bit in
	 MASK since carries from those bits will affect the bits we are
	 interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
	 same as the XOR case above.  Ensure that the constant we form is not
	 wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));

	  return force_to_mode (x, mode, mask, reg, next_select);
	}

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
	 use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    reg, next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = gen_unary (code, op_mode, op_mode, op0);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
	 which is equal to STORE_FLAG_VALUE.  */
      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
	  && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
	 written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 1), mode,
						     mask, reg, next_select)));
      SUBST (XEXP (x, 2),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 2), mode,
						     mask, reg, next_select)));
      break;

    default:
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_for_combine (mode, x);
}
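The PLUS case above can be checked with plain integers: when FOO is
already aligned and only the bits selected by the alignment mask are
wanted, the addend may itself be masked.  A minimal standalone sketch
(not part of combine.c; the values are invented for illustration):

#include <stdio.h>

int
main (void)
{
  /* If FOO is 8-byte aligned and only the bits in ~7 are needed, the
     addend 13 can be masked to 13 & ~7 == 8 with no visible change.  */
  unsigned int foo = 0x1000;		/* already aligned to 8 */
  unsigned int m = ~7u;			/* mask turning off low bits */

  printf ("%#x %#x\n", (foo + 13) & m, (foo + 8) & m);	/* both 0x1008 */
  return 0;
}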
7236\f
abe6e52f
RK
7237/* Return nonzero if X is an expression that has one of two values depending on
7238 whether some other value is zero or nonzero. In that case, we return the
7239 value that is being tested, *PTRUE is set to the value if the rtx being
7240 returned has a nonzero value, and *PFALSE is set to the other alternative.
7241
7242 If we return zero, we set *PTRUE and *PFALSE to X. */
7243
7244static rtx
7245if_then_else_cond (x, ptrue, pfalse)
7246 rtx x;
7247 rtx *ptrue, *pfalse;
7248{
7249 enum machine_mode mode = GET_MODE (x);
7250 enum rtx_code code = GET_CODE (x);
abe6e52f
RK
7251 rtx cond0, cond1, true0, true1, false0, false1;
7252 unsigned HOST_WIDE_INT nz;
7253
14a774a9
RK
7254 /* If we are comparing a value against zero, we are done. */
7255 if ((code == NE || code == EQ)
7256 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
7257 {
e8758a3a
JL
7258 *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7259 *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
14a774a9
RK
7260 return XEXP (x, 0);
7261 }
7262
abe6e52f
RK
7263 /* If this is a unary operation whose operand has one of two values, apply
7264 our opcode to compute those values. */
14a774a9
RK
7265 else if (GET_RTX_CLASS (code) == '1'
7266 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
abe6e52f 7267 {
0c1c8ea6
RK
7268 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
7269 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
abe6e52f
RK
7270 return cond0;
7271 }
7272
3a19aabc 7273 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
ddd5a7c1 7274 make can't possibly match and would suppress other optimizations. */
3a19aabc
RK
7275 else if (code == COMPARE)
7276 ;
7277
abe6e52f
RK
7278 /* If this is a binary operation, see if either side has only one of two
7279 values. If either one does or if both do and they are conditional on
7280 the same value, compute the new true and false values. */
7281 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7282 || GET_RTX_CLASS (code) == '<')
7283 {
7284 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7285 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7286
7287 if ((cond0 != 0 || cond1 != 0)
7288 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7289 {
987e845a
JW
7290 /* If if_then_else_cond returned zero, then true/false are the
7291 same rtl. We must copy one of them to prevent invalid rtl
7292 sharing. */
7293 if (cond0 == 0)
7294 true0 = copy_rtx (true0);
7295 else if (cond1 == 0)
7296 true1 = copy_rtx (true1);
7297
abe6e52f
RK
7298 *ptrue = gen_binary (code, mode, true0, true1);
7299 *pfalse = gen_binary (code, mode, false0, false1);
7300 return cond0 ? cond0 : cond1;
7301 }
9210df58 7302
9210df58 7303 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
0802d516
RK
7304 operands is zero when the other is non-zero, and vice-versa,
7305 and STORE_FLAG_VALUE is 1 or -1. */
9210df58 7306
0802d516
RK
7307 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7308 && (code == PLUS || code == IOR || code == XOR || code == MINUS
663522cb 7309 || code == UMAX)
9210df58
RK
7310 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7311 {
7312 rtx op0 = XEXP (XEXP (x, 0), 1);
7313 rtx op1 = XEXP (XEXP (x, 1), 1);
7314
7315 cond0 = XEXP (XEXP (x, 0), 0);
7316 cond1 = XEXP (XEXP (x, 1), 0);
7317
7318 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7319 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
9a915772 7320 && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
9210df58
RK
7321 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7322 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7323 || ((swap_condition (GET_CODE (cond0))
9a915772 7324 == combine_reversed_comparison_code (cond1))
9210df58
RK
7325 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7326 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7327 && ! side_effects_p (x))
7328 {
7329 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
663522cb
KH
7330 *pfalse = gen_binary (MULT, mode,
7331 (code == MINUS
0c1c8ea6 7332 ? gen_unary (NEG, mode, mode, op1) : op1),
9210df58
RK
7333 const_true_rtx);
7334 return cond0;
7335 }
7336 }
7337
7338 /* Similarly for MULT, AND and UMIN, except that for these the result
7339 is always zero. */
0802d516
RK
7340 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7341 && (code == MULT || code == AND || code == UMIN)
9210df58
RK
7342 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7343 {
7344 cond0 = XEXP (XEXP (x, 0), 0);
7345 cond1 = XEXP (XEXP (x, 1), 0);
7346
7347 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7348 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
9a915772 7349 && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
9210df58
RK
7350 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7351 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7352 || ((swap_condition (GET_CODE (cond0))
9a915772 7353 == combine_reversed_comparison_code (cond1))
9210df58
RK
7354 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7355 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7356 && ! side_effects_p (x))
7357 {
7358 *ptrue = *pfalse = const0_rtx;
7359 return cond0;
7360 }
7361 }
abe6e52f
RK
7362 }
7363
7364 else if (code == IF_THEN_ELSE)
7365 {
7366 /* If we have IF_THEN_ELSE already, extract the condition and
7367 canonicalize it if it is NE or EQ. */
7368 cond0 = XEXP (x, 0);
7369 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7370 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7371 return XEXP (cond0, 0);
7372 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7373 {
7374 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7375 return XEXP (cond0, 0);
7376 }
7377 else
7378 return cond0;
7379 }
7380
7381 /* If X is a normal SUBREG with both inner and outer modes integral,
7382 we can narrow both the true and false values of the inner expression,
7383 if there is a condition. */
7384 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
7385 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
7386 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
7387 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7388 &true0, &false0)))
7389 {
668bcf76
JL
7390 if ((GET_CODE (SUBREG_REG (x)) == REG
7391 || GET_CODE (SUBREG_REG (x)) == MEM
7392 || CONSTANT_P (SUBREG_REG (x)))
7393 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
54f3b5c2
R
7394 && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0))
7395 {
80c8b1aa
DD
7396 true0 = operand_subword (true0, SUBREG_WORD (x), 0,
7397 GET_MODE (SUBREG_REG (x)));
7398 false0 = operand_subword (false0, SUBREG_WORD (x), 0,
7399 GET_MODE (SUBREG_REG (x)));
54f3b5c2 7400 }
49219895 7401 *ptrue = force_to_mode (true0, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
00244e6b 7402 *pfalse
49219895 7403 = force_to_mode (false0, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
abe6e52f 7404
abe6e52f
RK
7405 return cond0;
7406 }
7407
7408 /* If X is a constant, this isn't special and will cause confusion
7409 if we treat it as such. Likewise if it is equivalent to a constant. */
7410 else if (CONSTANT_P (x)
7411 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7412 ;
7413
1f3f36d1
RH
7414 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7415 will be least confusing to the rest of the compiler. */
7416 else if (mode == BImode)
7417 {
7418 *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7419 return x;
7420 }
7421
663522cb 7422 /* If X is known to be either 0 or -1, those are the true and
abe6e52f 7423 false values when testing X. */
49219895
JH
7424 else if (x == constm1_rtx || x == const0_rtx
7425 || (mode != VOIDmode
7426 && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
abe6e52f
RK
7427 {
7428 *ptrue = constm1_rtx, *pfalse = const0_rtx;
7429 return x;
7430 }
7431
7432 /* Likewise for 0 or a single bit. */
49219895
JH
7433 else if (mode != VOIDmode
7434 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7435 && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
abe6e52f
RK
7436 {
7437 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
7438 return x;
7439 }
7440
7441 /* Otherwise fail; show no condition with true and false values the same. */
7442 *ptrue = *pfalse = x;
7443 return 0;
7444}
7445\f
1a26b032
RK
7446/* Return the value of expression X given the fact that condition COND
7447 is known to be true when applied to REG as its first operand and VAL
7448 as its second. X is known to not be shared and so can be modified in
7449 place.
7450
7451 We only handle the simplest cases, and specifically those cases that
7452 arise with IF_THEN_ELSE expressions. */
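/* For example, if COND is GE, REG is (reg A) and VAL is const0_rtx
   (i.e. A >= 0 is known to hold), then (abs (reg A)) simplifies to
   (reg A), and (smax (reg A) (const_int 0)) simplifies to (reg A)
   as well.  */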
7453
7454static rtx
7455known_cond (x, cond, reg, val)
7456 rtx x;
7457 enum rtx_code cond;
7458 rtx reg, val;
7459{
7460 enum rtx_code code = GET_CODE (x);
f24ad0e4 7461 rtx temp;
6f7d635c 7462 const char *fmt;
1a26b032
RK
7463 int i, j;
7464
7465 if (side_effects_p (x))
7466 return x;
7467
69bc0a1f
JH
7468 if (cond == EQ && rtx_equal_p (x, reg) && ! FLOAT_MODE_P (GET_MODE (x)))
7469 return val;
7470 if (cond == UNEQ && rtx_equal_p (x, reg))
1a26b032
RK
7471 return val;
7472
7473 /* If X is (abs REG) and we know something about REG's relationship
7474 with zero, we may be able to simplify this. */
7475
7476 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7477 switch (cond)
7478 {
7479 case GE: case GT: case EQ:
7480 return XEXP (x, 0);
7481 case LT: case LE:
0c1c8ea6
RK
7482 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
7483 XEXP (x, 0));
e9a25f70
JL
7484 default:
7485 break;
1a26b032
RK
7486 }
7487
7488 /* The only other cases we handle are MIN, MAX, and comparisons if the
7489 operands are the same as REG and VAL. */
7490
7491 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7492 {
7493 if (rtx_equal_p (XEXP (x, 0), val))
7494 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7495
7496 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7497 {
7498 if (GET_RTX_CLASS (code) == '<')
1eb8759b
RH
7499 {
7500 if (comparison_dominates_p (cond, code))
7501 return const_true_rtx;
1a26b032 7502
9a915772 7503 code = combine_reversed_comparison_code (x);
1eb8759b
RH
7504 if (code != UNKNOWN
7505 && comparison_dominates_p (cond, code))
7506 return const0_rtx;
7507 else
7508 return x;
7509 }
1a26b032
RK
7510 else if (code == SMAX || code == SMIN
7511 || code == UMIN || code == UMAX)
7512 {
7513 int unsignedp = (code == UMIN || code == UMAX);
7514
ac4cdf40
JE
7515 /* Do not reverse the condition when it is NE or EQ.
7516 This is because we cannot conclude anything about
7517 the value of 'SMAX (x, y)' when x is not equal to y,
7518 but we can when x equals y. */
7519 if ((code == SMAX || code == UMAX)
7520 && ! (cond == EQ || cond == NE))
1a26b032
RK
7521 cond = reverse_condition (cond);
7522
7523 switch (cond)
7524 {
7525 case GE: case GT:
7526 return unsignedp ? x : XEXP (x, 1);
7527 case LE: case LT:
7528 return unsignedp ? x : XEXP (x, 0);
7529 case GEU: case GTU:
7530 return unsignedp ? XEXP (x, 1) : x;
7531 case LEU: case LTU:
7532 return unsignedp ? XEXP (x, 0) : x;
e9a25f70
JL
7533 default:
7534 break;
1a26b032
RK
7535 }
7536 }
7537 }
7538 }
7539
7540 fmt = GET_RTX_FORMAT (code);
7541 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7542 {
7543 if (fmt[i] == 'e')
7544 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7545 else if (fmt[i] == 'E')
7546 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7547 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7548 cond, reg, val));
7549 }
7550
7551 return x;
7552}
7553\f
e11fa86f
RK
7554/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7555 assignment as a field assignment. */
7556
7557static int
7558rtx_equal_for_field_assignment_p (x, y)
7559 rtx x;
7560 rtx y;
7561{
e11fa86f
RK
7562 if (x == y || rtx_equal_p (x, y))
7563 return 1;
7564
7565 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7566 return 0;
7567
7568 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7569 Note that all SUBREGs of MEM are paradoxical; otherwise they
7570 would have been rewritten. */
7571 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7572 && GET_CODE (SUBREG_REG (y)) == MEM
7573 && rtx_equal_p (SUBREG_REG (y),
7574 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7575 return 1;
7576
7577 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7578 && GET_CODE (SUBREG_REG (x)) == MEM
7579 && rtx_equal_p (SUBREG_REG (x),
7580 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7581 return 1;
7582
9ec36da5
JL
7583 /* We used to see if get_last_value of X and Y were the same but that's
7584 not correct. In one direction, we'll cause the assignment to have
7585 the wrong destination and in the other, we'll import a register into this
7586 insn that might already have been dead. So fail if none of the
7587 above cases are true. */
7588 return 0;
e11fa86f
RK
7589}
7590\f
230d793d
RS
7591/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7592 Return that assignment if so.
7593
7594 We only handle the most common cases. */
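/* For example, (set A (and (rotate (const_int -2) POS) A)) clears the
   single bit at position POS of A and is rewritten below as, roughly,
   (set (zero_extract A (const_int 1) POS) (const_int 0)).  */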
7595
7596static rtx
7597make_field_assignment (x)
7598 rtx x;
7599{
7600 rtx dest = SET_DEST (x);
7601 rtx src = SET_SRC (x);
dfbe1b2f 7602 rtx assign;
e11fa86f 7603 rtx rhs, lhs;
5f4f0e22 7604 HOST_WIDE_INT c1;
770ae6cc
RK
7605 HOST_WIDE_INT pos;
7606 unsigned HOST_WIDE_INT len;
dfbe1b2f
RK
7607 rtx other;
7608 enum machine_mode mode;
230d793d
RS
7609
7610 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7611 a clear of a one-bit field. We will have changed it to
7612 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7613 for a SUBREG. */
7614
7615 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7616 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7617 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
e11fa86f 7618 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7619 {
8999a12e 7620 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 7621 1, 1, 1, 0);
76184def 7622 if (assign != 0)
38a448ca 7623 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 7624 return x;
230d793d
RS
7625 }
7626
7627 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7628 && subreg_lowpart_p (XEXP (src, 0))
663522cb 7629 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
230d793d
RS
7630 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7631 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7632 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
e11fa86f 7633 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7634 {
8999a12e 7635 assign = make_extraction (VOIDmode, dest, 0,
230d793d
RS
7636 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7637 1, 1, 1, 0);
76184def 7638 if (assign != 0)
38a448ca 7639 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 7640 return x;
230d793d
RS
7641 }
7642
9dd11dcb 7643 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
230d793d
RS
7644 one-bit field. */
7645 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7646 && XEXP (XEXP (src, 0), 0) == const1_rtx
e11fa86f 7647 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7648 {
8999a12e 7649 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 7650 1, 1, 1, 0);
76184def 7651 if (assign != 0)
38a448ca 7652 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
76184def 7653 return x;
230d793d
RS
7654 }
7655
dfbe1b2f 7656 /* The other case we handle is assignments into a constant-position
9dd11dcb 7657 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
dfbe1b2f
RK
7658 a mask that has all one bits except for a group of zero bits and
7659 OTHER is known to have zeros where C1 has ones, this is such an
7660 assignment. Compute the position and length from C1. Shift OTHER
7661 to the appropriate position, force it to the required mode, and
7662 make the extraction. Check for the AND in both operands. */
7663
9dd11dcb 7664 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
e11fa86f
RK
7665 return x;
7666
7667 rhs = expand_compound_operation (XEXP (src, 0));
7668 lhs = expand_compound_operation (XEXP (src, 1));
7669
7670 if (GET_CODE (rhs) == AND
7671 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7672 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7673 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7674 else if (GET_CODE (lhs) == AND
7675 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7676 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7677 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
dfbe1b2f
RK
7678 else
7679 return x;
230d793d 7680
663522cb 7681 pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
dfbe1b2f 7682 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
e5e809f4
JL
7683 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7684 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
dfbe1b2f 7685 return x;
230d793d 7686
5f4f0e22 7687 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
76184def
DE
7688 if (assign == 0)
7689 return x;
230d793d 7690
dfbe1b2f
RK
7691 /* The mode to use for the source is the mode of the assignment, or of
7692 what is inside a possible STRICT_LOW_PART. */
663522cb 7693 mode = (GET_CODE (assign) == STRICT_LOW_PART
dfbe1b2f 7694 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 7695
dfbe1b2f
RK
7696 /* Shift OTHER right POS places and make it the source, restricting it
7697 to the proper length and mode. */
230d793d 7698
5f4f0e22
CH
7699 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7700 GET_MODE (src), other, pos),
6139ff20
RK
7701 mode,
7702 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
0345195a 7703 ? ~(unsigned HOST_WIDE_INT) 0
729a2125 7704 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 7705 dest, 0);
230d793d 7706
dfbe1b2f 7707 return gen_rtx_combine (SET, VOIDmode, assign, src);
230d793d
RS
7708}
7709\f
7710/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7711 if so. */
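/* For example, (ior (and A C) (and B C)) becomes (and (ior A B) C), and
   (plus (mult A C) (mult B C)) becomes (mult (plus A B) C); the switch
   below determines which inner/outer combinations actually distribute.  */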
7712
7713static rtx
7714apply_distributive_law (x)
7715 rtx x;
7716{
7717 enum rtx_code code = GET_CODE (x);
7718 rtx lhs, rhs, other;
7719 rtx tem;
7720 enum rtx_code inner_code;
7721
d8a8a4da
RS
7722 /* Distributivity is not true for floating point.
7723 It can change the value. So don't do it.
7724 -- rms and moshier@world.std.com. */
3ad2180a 7725 if (FLOAT_MODE_P (GET_MODE (x)))
d8a8a4da
RS
7726 return x;
7727
230d793d
RS
7728 /* The outer operation can only be one of the following: */
7729 if (code != IOR && code != AND && code != XOR
7730 && code != PLUS && code != MINUS)
7731 return x;
7732
7733 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7734
0f41302f
MS
7735 /* If either operand is a primitive we can't do anything, so get out
7736 fast. */
230d793d 7737 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
dfbe1b2f 7738 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
230d793d
RS
7739 return x;
7740
7741 lhs = expand_compound_operation (lhs);
7742 rhs = expand_compound_operation (rhs);
7743 inner_code = GET_CODE (lhs);
7744 if (inner_code != GET_CODE (rhs))
7745 return x;
7746
7747 /* See if the inner and outer operations distribute. */
7748 switch (inner_code)
7749 {
7750 case LSHIFTRT:
7751 case ASHIFTRT:
7752 case AND:
7753 case IOR:
7754 /* These all distribute except over PLUS. */
7755 if (code == PLUS || code == MINUS)
7756 return x;
7757 break;
7758
7759 case MULT:
7760 if (code != PLUS && code != MINUS)
7761 return x;
7762 break;
7763
7764 case ASHIFT:
45620ed4 7765 /* This is also a multiply, so it distributes over everything. */
230d793d
RS
7766 break;
7767
7768 case SUBREG:
dfbe1b2f
RK
7769 /* Non-paradoxical SUBREGs distribute over all operations, provided
7770 the inner modes and word numbers are the same, this is an extraction
2b4bd1bc
JW
7771 of a low-order part, we don't convert an fp operation to int or
7772 vice versa, and we would not be converting a single-word
dfbe1b2f 7773 operation into a multi-word operation. The latter test is not
2b4bd1bc 7774 required, but it prevents generating unneeded multi-word operations.
dfbe1b2f
RK
7775 Some of the previous tests are redundant given the latter test, but
7776 are retained because they are required for correctness.
7777
7778 We produce the result slightly differently in this case. */
7779
7780 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7781 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
7782 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
7783 || (GET_MODE_CLASS (GET_MODE (lhs))
7784 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7785 || (GET_MODE_SIZE (GET_MODE (lhs))
8af24e26 7786 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7787 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
230d793d
RS
7788 return x;
7789
7790 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7791 SUBREG_REG (lhs), SUBREG_REG (rhs));
7792 return gen_lowpart_for_combine (GET_MODE (x), tem);
7793
7794 default:
7795 return x;
7796 }
7797
7798 /* Set LHS and RHS to the inner operands (A and B in the example
7799 above) and set OTHER to the common operand (C in the example).
7800 There is only one way to do this unless the inner operation is
7801 commutative. */
7802 if (GET_RTX_CLASS (inner_code) == 'c'
7803 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7804 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7805 else if (GET_RTX_CLASS (inner_code) == 'c'
7806 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7807 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7808 else if (GET_RTX_CLASS (inner_code) == 'c'
7809 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7810 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7811 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7812 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7813 else
7814 return x;
7815
7816 /* Form the new inner operation, seeing if it simplifies first. */
7817 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7818
7819 /* There is one exception to the general way of distributing:
7820 (a | c) ^ (b | c) -> (a ^ b) & (~c) */
7821 if (code == XOR && inner_code == IOR)
7822 {
7823 inner_code = AND;
0c1c8ea6 7824 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
230d793d
RS
7825 }
7826
7827 /* We may be able to continue distributing the result, so call
7828 ourselves recursively on the inner operation before forming the
7829 outer operation, which we return. */
7830 return gen_binary (inner_code, GET_MODE (x),
7831 apply_distributive_law (tem), other);
7832}
7833\f
7834/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7835 in MODE.
7836
7837 Return an equivalent form, if different from X. Otherwise, return X. If
7838 X is zero, we are to always construct the equivalent form. */
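/* For example, if the nonzero bits of VAROP are 0x0f and CONSTOP is
   0xff, the AND is redundant and VAROP itself is returned; if CONSTOP
   is 0x03, the result is normally (and VAROP (const_int 3)).  */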
7839
7840static rtx
7841simplify_and_const_int (x, mode, varop, constop)
7842 rtx x;
7843 enum machine_mode mode;
7844 rtx varop;
5f4f0e22 7845 unsigned HOST_WIDE_INT constop;
230d793d 7846{
951553af 7847 unsigned HOST_WIDE_INT nonzero;
42301240 7848 int i;
230d793d 7849
6139ff20
RK
7850 /* Simplify VAROP knowing that we will be only looking at some of the
7851 bits in it. */
e3d616e3 7852 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
230d793d 7853
6139ff20
RK
7854 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7855 CONST_INT, we are done. */
7856 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7857 return varop;
230d793d 7858
fc06d7aa
RK
7859 /* See what bits may be nonzero in VAROP. Unlike the general case of
7860 a call to nonzero_bits, here we don't care about bits outside
7861 MODE. */
7862
7863 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7e4ce834 7864 nonzero = trunc_int_for_mode (nonzero, mode);
9fa6d012 7865
230d793d 7866 /* Turn off all bits in the constant that are known to already be zero.
951553af 7867 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
230d793d
RS
7868 which is tested below. */
7869
951553af 7870 constop &= nonzero;
230d793d
RS
7871
7872 /* If we don't have any bits left, return zero. */
7873 if (constop == 0)
7874 return const0_rtx;
7875
42301240
RK
7876 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7877 a power of two, we can replace this with a ASHIFT. */
7878 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7879 && (i = exact_log2 (constop)) >= 0)
7880 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
663522cb 7881
6139ff20
RK
7882 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7883 or XOR, then try to apply the distributive law. This may eliminate
7884 operations if either branch can be simplified because of the AND.
7885 It may also make some cases more complex, but those cases probably
7886 won't match a pattern either with or without this. */
7887
7888 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7889 return
7890 gen_lowpart_for_combine
7891 (mode,
7892 apply_distributive_law
7893 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7894 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7895 XEXP (varop, 0), constop),
7896 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7897 XEXP (varop, 1), constop))));
7898
230d793d
RS
7899 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7900 if we already had one (just check for the simplest cases). */
7901 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7902 && GET_MODE (XEXP (x, 0)) == mode
7903 && SUBREG_REG (XEXP (x, 0)) == varop)
7904 varop = XEXP (x, 0);
7905 else
7906 varop = gen_lowpart_for_combine (mode, varop);
7907
0f41302f 7908 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
7909 if (GET_CODE (varop) == CLOBBER)
7910 return x ? x : varop;
7911
7912 /* If we are only masking insignificant bits, return VAROP. */
951553af 7913 if (constop == nonzero)
230d793d
RS
7914 x = varop;
7915
7916 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7917 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6139ff20 7918 x = gen_binary (AND, mode, varop, GEN_INT (constop));
230d793d
RS
7919
7920 else
7921 {
7922 if (GET_CODE (XEXP (x, 1)) != CONST_INT
e51712db 7923 || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
5f4f0e22 7924 SUBST (XEXP (x, 1), GEN_INT (constop));
230d793d
RS
7925
7926 SUBST (XEXP (x, 0), varop);
7927 }
7928
7929 return x;
7930}
7931\f
b3728b0e
JW
7932/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7933 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7934 is less useful. We can't allow both, because that results in exponential
956d6950 7935 run time recursion. There is a nullstone testcase that triggered
b3728b0e
JW
7936 this. This macro avoids accidental uses of num_sign_bit_copies. */
7937#define num_sign_bit_copies()
7938
230d793d
RS
7939/* Given an expression, X, compute which bits in X can be non-zero.
7940 We don't care about bits outside of those defined in MODE.
7941
7942 For most X this is simply GET_MODE_MASK (MODE), but if X is
7943 a shift, AND, or zero_extract, we can do better. */
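/* For example, nonzero_bits of (zero_extend:SI (reg:QI R)) in SImode
   is 0xff, and nonzero_bits of (and:SI (reg:SI R) (const_int 12)) is
   at most 12, i.e. only bits 2 and 3 can be set.  */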
7944
5f4f0e22 7945static unsigned HOST_WIDE_INT
951553af 7946nonzero_bits (x, mode)
230d793d
RS
7947 rtx x;
7948 enum machine_mode mode;
7949{
951553af
RK
7950 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7951 unsigned HOST_WIDE_INT inner_nz;
230d793d 7952 enum rtx_code code;
770ae6cc 7953 unsigned int mode_width = GET_MODE_BITSIZE (mode);
230d793d
RS
7954 rtx tem;
7955
1c75dfa4
RK
7956 /* For floating-point values, assume all bits are needed. */
7957 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7958 return nonzero;
7959
230d793d
RS
7960 /* If X is wider than MODE, use its mode instead. */
7961 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7962 {
7963 mode = GET_MODE (x);
951553af 7964 nonzero = GET_MODE_MASK (mode);
230d793d
RS
7965 mode_width = GET_MODE_BITSIZE (mode);
7966 }
7967
5f4f0e22 7968 if (mode_width > HOST_BITS_PER_WIDE_INT)
230d793d
RS
7969 /* Our only callers in this case look for single bit values. So
7970 just return the mode mask. Those tests will then be false. */
951553af 7971 return nonzero;
230d793d 7972
8baf60bb 7973#ifndef WORD_REGISTER_OPERATIONS
c6965c0f 7974 /* If MODE is wider than X, but both are a single word for both the host
663522cb 7975 and target machines, we can compute this from which bits of the
0840fd91
RK
7976 object might be nonzero in its own mode, taking into account the fact
7977 that on many CISC machines, accessing an object in a wider mode
7978 causes the high-order bits to become undefined. So they are
7979 not known to be zero. */
7980
7981 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7982 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7983 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
c6965c0f 7984 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
0840fd91
RK
7985 {
7986 nonzero &= nonzero_bits (x, GET_MODE (x));
663522cb 7987 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
0840fd91
RK
7988 return nonzero;
7989 }
7990#endif
7991
230d793d
RS
7992 code = GET_CODE (x);
7993 switch (code)
7994 {
7995 case REG:
320dd7a7
RK
7996#ifdef POINTERS_EXTEND_UNSIGNED
7997 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7998 all the bits above ptr_mode are known to be zero. */
7999 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3502dc9c 8000 && REG_POINTER (x))
320dd7a7
RK
8001 nonzero &= GET_MODE_MASK (ptr_mode);
8002#endif
8003
b0d71df9
RK
8004#ifdef STACK_BOUNDARY
8005 /* If this is the stack pointer, we may know something about its
8006 alignment. If PUSH_ROUNDING is defined, it is possible for the
230d793d
RS
8007 stack to be momentarily aligned only to that amount, so we pick
8008 the least alignment. */
8009
ee49a9c7
JW
8010 /* We can't check for arg_pointer_rtx here, because it is not
8011 guaranteed to have as much alignment as the stack pointer.
8012 In particular, in the Irix6 n64 ABI, the stack has 128 bit
8013 alignment but the argument pointer has only 64 bit alignment. */
8014
0e9ff885
DM
8015 if ((x == frame_pointer_rtx
8016 || x == stack_pointer_rtx
8017 || x == hard_frame_pointer_rtx
8018 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
8019 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
8020#ifdef STACK_BIAS
8021 && !STACK_BIAS
663522cb 8022#endif
0e9ff885 8023 )
230d793d 8024 {
b0d71df9 8025 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
230d793d
RS
8026
8027#ifdef PUSH_ROUNDING
f73ad30e 8028 if (REGNO (x) == STACK_POINTER_REGNUM && PUSH_ARGS)
b0d71df9 8029 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
230d793d
RS
8030#endif
8031
320dd7a7
RK
8032 /* We must return here, otherwise we may get a worse result from
8033 one of the choices below. There is nothing useful below as
8034 far as the stack pointer is concerned. */
663522cb 8035 return nonzero &= ~(sp_alignment - 1);
230d793d 8036 }
b0d71df9 8037#endif
230d793d 8038
55310dad
RK
8039 /* If X is a register whose nonzero bits value is current, use it.
8040 Otherwise, if X is a register whose value we can find, use that
8041 value. Otherwise, use the previously-computed global nonzero bits
8042 for this register. */
8043
8044 if (reg_last_set_value[REGNO (x)] != 0
8045 && reg_last_set_mode[REGNO (x)] == mode
57cf50a4
GRK
8046 && (reg_last_set_label[REGNO (x)] == label_tick
8047 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8048 && REG_N_SETS (REGNO (x)) == 1
663522cb 8049 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
57cf50a4 8050 REGNO (x))))
55310dad
RK
8051 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8052 return reg_last_set_nonzero_bits[REGNO (x)];
230d793d
RS
8053
8054 tem = get_last_value (x);
9afa3d54 8055
230d793d 8056 if (tem)
9afa3d54
RK
8057 {
8058#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8059 /* If X is narrower than MODE and TEM is a non-negative
8060 constant that would appear negative in the mode of X,
8061 sign-extend it for use in reg_nonzero_bits because some
8062 machines (maybe most) will actually do the sign-extension
663522cb 8063 and this is the conservative approach.
9afa3d54
RK
8064
8065 ??? For 2.5, try to tighten up the MD files in this regard
8066 instead of this kludge. */
8067
8068 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
8069 && GET_CODE (tem) == CONST_INT
8070 && INTVAL (tem) > 0
8071 && 0 != (INTVAL (tem)
8072 & ((HOST_WIDE_INT) 1
9e69be8c 8073 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
8074 tem = GEN_INT (INTVAL (tem)
8075 | ((HOST_WIDE_INT) (-1)
8076 << GET_MODE_BITSIZE (GET_MODE (x))));
8077#endif
8078 return nonzero_bits (tem, mode);
8079 }
951553af
RK
8080 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
8081 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 8082 else
951553af 8083 return nonzero;
230d793d
RS
8084
8085 case CONST_INT:
9afa3d54
RK
8086#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8087 /* If X is negative in MODE, sign-extend the value. */
9e69be8c
RK
8088 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8089 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8090 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
9afa3d54
RK
8091#endif
8092
230d793d
RS
8093 return INTVAL (x);
8094
230d793d 8095 case MEM:
8baf60bb 8096#ifdef LOAD_EXTEND_OP
230d793d
RS
8097 /* In many, if not most, RISC machines, reading a byte from memory
8098 zeros the rest of the register. Noticing that fact saves a lot
8099 of extra zero-extends. */
8baf60bb
RK
8100 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8101 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d 8102#endif
8baf60bb 8103 break;
230d793d 8104
230d793d 8105 case EQ: case NE:
69bc0a1f
JH
8106 case UNEQ: case LTGT:
8107 case GT: case GTU: case UNGT:
8108 case LT: case LTU: case UNLT:
8109 case GE: case GEU: case UNGE:
8110 case LE: case LEU: case UNLE:
8111 case UNORDERED: case ORDERED:
3f508eca 8112
c6965c0f
RK
8113 /* If this produces an integer result, we know which bits are set.
8114 Code here used to clear bits outside the mode of X, but that is
8115 now done above. */
230d793d 8116
c6965c0f
RK
8117 if (GET_MODE_CLASS (mode) == MODE_INT
8118 && mode_width <= HOST_BITS_PER_WIDE_INT)
8119 nonzero = STORE_FLAG_VALUE;
230d793d 8120 break;
230d793d 8121
230d793d 8122 case NEG:
b3728b0e
JW
8123#if 0
8124 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8125 and num_sign_bit_copies. */
d0ab8cd3
RK
8126 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8127 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 8128 nonzero = 1;
b3728b0e 8129#endif
230d793d
RS
8130
8131 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
663522cb 8132 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
230d793d 8133 break;
d0ab8cd3
RK
8134
8135 case ABS:
b3728b0e
JW
8136#if 0
8137 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8138 and num_sign_bit_copies. */
d0ab8cd3
RK
8139 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8140 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 8141 nonzero = 1;
b3728b0e 8142#endif
d0ab8cd3 8143 break;
230d793d
RS
8144
8145 case TRUNCATE:
951553af 8146 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
230d793d
RS
8147 break;
8148
8149 case ZERO_EXTEND:
951553af 8150 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 8151 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 8152 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
230d793d
RS
8153 break;
8154
8155 case SIGN_EXTEND:
8156 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8157 Otherwise, show all the bits in the outer mode but not the inner
8158 may be non-zero. */
951553af 8159 inner_nz = nonzero_bits (XEXP (x, 0), mode);
230d793d
RS
8160 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8161 {
951553af 8162 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
e3da301d
MS
8163 if (inner_nz
8164 & (((HOST_WIDE_INT) 1
8165 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 8166 inner_nz |= (GET_MODE_MASK (mode)
663522cb 8167 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
230d793d
RS
8168 }
8169
951553af 8170 nonzero &= inner_nz;
230d793d
RS
8171 break;
8172
8173 case AND:
951553af
RK
8174 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8175 & nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
8176 break;
8177
d0ab8cd3
RK
8178 case XOR: case IOR:
8179 case UMIN: case UMAX: case SMIN: case SMAX:
951553af
RK
8180 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8181 | nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
8182 break;
8183
8184 case PLUS: case MINUS:
8185 case MULT:
8186 case DIV: case UDIV:
8187 case MOD: case UMOD:
8188 /* We can apply the rules of arithmetic to compute the number of
8189 high- and low-order zero bits of these operations. We start by
8190 computing the width (position of the highest-order non-zero bit)
8191 and the number of low-order zero bits for each value. */
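      /* For instance, a PLUS of two values of widths W0 and W1 has width
	 at most MAX (W0, W1) + 1 because of the carry, while for MULT the
	 low-order zero counts add: a multiple of 4 times a multiple of 2
	 is a multiple of 8.  */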
8192 {
951553af
RK
8193 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
8194 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
8195 int width0 = floor_log2 (nz0) + 1;
8196 int width1 = floor_log2 (nz1) + 1;
8197 int low0 = floor_log2 (nz0 & -nz0);
8198 int low1 = floor_log2 (nz1 & -nz1);
318b149c
RK
8199 HOST_WIDE_INT op0_maybe_minusp
8200 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8201 HOST_WIDE_INT op1_maybe_minusp
8202 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
770ae6cc 8203 unsigned int result_width = mode_width;
230d793d
RS
8204 int result_low = 0;
8205
8206 switch (code)
8207 {
8208 case PLUS:
0e9ff885
DM
8209#ifdef STACK_BIAS
8210 if (STACK_BIAS
663522cb
KH
8211 && (XEXP (x, 0) == stack_pointer_rtx
8212 || XEXP (x, 0) == frame_pointer_rtx)
8213 && GET_CODE (XEXP (x, 1)) == CONST_INT)
0e9ff885
DM
8214 {
8215 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
8216
663522cb
KH
8217 nz0 = (GET_MODE_MASK (mode) & ~(sp_alignment - 1));
8218 nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
8219 width0 = floor_log2 (nz0) + 1;
8220 width1 = floor_log2 (nz1) + 1;
8221 low0 = floor_log2 (nz0 & -nz0);
8222 low1 = floor_log2 (nz1 & -nz1);
0e9ff885 8223 }
663522cb 8224#endif
230d793d
RS
8225 result_width = MAX (width0, width1) + 1;
8226 result_low = MIN (low0, low1);
8227 break;
8228 case MINUS:
8229 result_low = MIN (low0, low1);
8230 break;
8231 case MULT:
8232 result_width = width0 + width1;
8233 result_low = low0 + low1;
8234 break;
8235 case DIV:
8236 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8237 result_width = width0;
8238 break;
8239 case UDIV:
8240 result_width = width0;
8241 break;
8242 case MOD:
8243 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8244 result_width = MIN (width0, width1);
8245 result_low = MIN (low0, low1);
8246 break;
8247 case UMOD:
8248 result_width = MIN (width0, width1);
8249 result_low = MIN (low0, low1);
8250 break;
e9a25f70
JL
8251 default:
8252 abort ();
230d793d
RS
8253 }
8254
8255 if (result_width < mode_width)
951553af 8256 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
230d793d
RS
8257
8258 if (result_low > 0)
663522cb 8259 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
d1405722
RK
8260
8261#ifdef POINTERS_EXTEND_UNSIGNED
8262 /* If pointers extend unsigned and this is an addition or subtraction
8263 to a pointer in Pmode, all the bits above ptr_mode are known to be
8264 zero. */
8265 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8266 && (code == PLUS || code == MINUS)
8267 && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8268 nonzero &= GET_MODE_MASK (ptr_mode);
8269#endif
230d793d
RS
8270 }
8271 break;
8272
8273 case ZERO_EXTRACT:
8274 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 8275 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 8276 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
230d793d
RS
8277 break;
8278
8279 case SUBREG:
c3c2cb37
RK
8280 /* If this is a SUBREG formed for a promoted variable that has
8281 been zero-extended, we know that at least the high-order bits
8282 are zero, though others might be too. */
8283
8284 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
951553af
RK
8285 nonzero = (GET_MODE_MASK (GET_MODE (x))
8286 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 8287
230d793d
RS
8288 /* If the inner mode is a single word for both the host and target
8289 machines, we can compute this from which bits of the inner
951553af 8290 object might be nonzero. */
230d793d 8291 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5f4f0e22
CH
8292 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8293 <= HOST_BITS_PER_WIDE_INT))
230d793d 8294 {
951553af 8295 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8baf60bb 8296
b52ce03d
R
8297#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8298 /* If this is a typical RISC machine, we only have to worry
8299 about the way loads are extended. */
8300 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
729a2125
RK
8301 ? (((nonzero
8302 & (((unsigned HOST_WIDE_INT) 1
8303 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8304 != 0))
b52ce03d 8305 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
230d793d 8306#endif
b52ce03d
R
8307 {
8308 /* On many CISC machines, accessing an object in a wider mode
8309 causes the high-order bits to become undefined. So they are
8310 not known to be zero. */
8311 if (GET_MODE_SIZE (GET_MODE (x))
8312 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8313 nonzero |= (GET_MODE_MASK (GET_MODE (x))
663522cb 8314 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
b52ce03d 8315 }
230d793d
RS
8316 }
8317 break;
8318
8319 case ASHIFTRT:
8320 case LSHIFTRT:
8321 case ASHIFT:
230d793d 8322 case ROTATE:
951553af 8323 /* The nonzero bits are in two classes: any bits within MODE
230d793d 8324 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 8325 nonzero bits are those that are significant in the operand of
230d793d
RS
8326 the shift when shifted the appropriate number of bits. This
8327 shows that high-order bits are cleared by the right shift and
8328 low-order bits by left shifts. */
8329 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8330 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 8331 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
8332 {
8333 enum machine_mode inner_mode = GET_MODE (x);
770ae6cc 8334 unsigned int width = GET_MODE_BITSIZE (inner_mode);
230d793d 8335 int count = INTVAL (XEXP (x, 1));
5f4f0e22 8336 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
951553af
RK
8337 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
8338 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 8339 unsigned HOST_WIDE_INT outer = 0;
230d793d
RS
8340
8341 if (mode_width > width)
663522cb 8342 outer = (op_nonzero & nonzero & ~mode_mask);
230d793d
RS
8343
8344 if (code == LSHIFTRT)
8345 inner >>= count;
8346 else if (code == ASHIFTRT)
8347 {
8348 inner >>= count;
8349
951553af 8350 /* If the sign bit may have been nonzero before the shift, we
230d793d 8351 need to mark all the places it could have been copied to
951553af 8352 by the shift as possibly nonzero. */
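	  /* For example, an 8-bit ASHIFTRT by 2 of a value whose sign bit
	     (bit 7) may be set can leave any of bits 5 through 7 set.  */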
5f4f0e22
CH
8353 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8354 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d 8355 }
45620ed4 8356 else if (code == ASHIFT)
230d793d
RS
8357 inner <<= count;
8358 else
8359 inner = ((inner << (count % width)
8360 | (inner >> (width - (count % width)))) & mode_mask);
8361
951553af 8362 nonzero &= (outer | inner);
230d793d
RS
8363 }
8364 break;
8365
8366 case FFS:
8367 /* This is at most the number of bits in the mode. */
951553af 8368 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 8369 break;
d0ab8cd3
RK
8370
8371 case IF_THEN_ELSE:
951553af
RK
8372 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
8373 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 8374 break;
663522cb 8375
e9a25f70
JL
8376 default:
8377 break;
230d793d
RS
8378 }
8379
951553af 8380 return nonzero;
230d793d 8381}
b3728b0e
JW
8382
8383/* See the macro definition above. */
8384#undef num_sign_bit_copies
230d793d 8385\f
d0ab8cd3 8386/* Return the number of bits at the high-order end of X that are known to
5109d49f
RK
8387 be equal to the sign bit. X will be used in mode MODE; if MODE is
8388 VOIDmode, X will be used in its own mode. The returned value will always
8389 be between 1 and the number of bits in MODE. */
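/* For example, (sign_extend:SI (reg:QI R)) has at least 25 sign-bit
   copies in SImode (the 24 extended bits plus the sign bit itself),
   and (const_int -1) has as many copies as MODE has bits.  */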
d0ab8cd3 8390
770ae6cc 8391static unsigned int
d0ab8cd3
RK
8392num_sign_bit_copies (x, mode)
8393 rtx x;
8394 enum machine_mode mode;
8395{
8396 enum rtx_code code = GET_CODE (x);
770ae6cc 8397 unsigned int bitwidth;
d0ab8cd3 8398 int num0, num1, result;
951553af 8399 unsigned HOST_WIDE_INT nonzero;
d0ab8cd3
RK
8400 rtx tem;
8401
8402 /* If we weren't given a mode, use the mode of X. If the mode is still
1c75dfa4
RK
8403 VOIDmode, we don't know anything. Likewise if one of the modes is
8404 floating-point. */
d0ab8cd3
RK
8405
8406 if (mode == VOIDmode)
8407 mode = GET_MODE (x);
8408
1c75dfa4 8409 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
6752e8d2 8410 return 1;
d0ab8cd3
RK
8411
8412 bitwidth = GET_MODE_BITSIZE (mode);
8413
0f41302f 8414 /* For a smaller object, just ignore the high bits. */
312def2e 8415 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
770ae6cc
RK
8416 {
8417 num0 = num_sign_bit_copies (x, GET_MODE (x));
8418 return MAX (1,
8419 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8420 }
663522cb 8421
e9a25f70
JL
8422 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8423 {
0c314d1a
RK
8424#ifndef WORD_REGISTER_OPERATIONS
8425 /* If this machine does not do all register operations on the entire
8426 register and MODE is wider than the mode of X, we can say nothing
8427 at all about the high-order bits. */
e9a25f70
JL
8428 return 1;
8429#else
8430 /* Likewise on machines that do, if the mode of the object is smaller
8431 than a word and loads of that size don't sign extend, we can say
8432 nothing about the high order bits. */
8433 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8434#ifdef LOAD_EXTEND_OP
8435 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8436#endif
8437 )
8438 return 1;
0c314d1a 8439#endif
e9a25f70 8440 }
0c314d1a 8441
d0ab8cd3
RK
8442 switch (code)
8443 {
8444 case REG:
55310dad 8445
ff0dbdd1
RK
8446#ifdef POINTERS_EXTEND_UNSIGNED
8447 /* If pointers extend signed and this is a pointer in Pmode, say that
8448 all the bits above ptr_mode are known to be sign bit copies. */
8449 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
3502dc9c 8450 && REG_POINTER (x))
ff0dbdd1
RK
8451 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8452#endif
8453
55310dad
RK
8454 if (reg_last_set_value[REGNO (x)] != 0
8455 && reg_last_set_mode[REGNO (x)] == mode
57cf50a4
GRK
8456 && (reg_last_set_label[REGNO (x)] == label_tick
8457 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8458 && REG_N_SETS (REGNO (x)) == 1
8459 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
8460 REGNO (x))))
55310dad
RK
8461 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8462 return reg_last_set_sign_bit_copies[REGNO (x)];
d0ab8cd3 8463
663522cb 8464 tem = get_last_value (x);
d0ab8cd3
RK
8465 if (tem != 0)
8466 return num_sign_bit_copies (tem, mode);
55310dad
RK
8467
8468 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
8469 return reg_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
8470 break;
8471
457816e2 8472 case MEM:
8baf60bb 8473#ifdef LOAD_EXTEND_OP
457816e2 8474 /* Some RISC machines sign-extend all loads smaller than a word. */
8baf60bb 8475 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
770ae6cc
RK
8476 return MAX (1, ((int) bitwidth
8477 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
457816e2 8478#endif
8baf60bb 8479 break;
457816e2 8480
d0ab8cd3
RK
8481 case CONST_INT:
8482 /* If the constant is negative, take its 1's complement and remask.
8483 Then see how many zero bits we have. */
951553af 8484 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 8485 if (bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 8486 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
663522cb 8487 nonzero = (~nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 8488
951553af 8489 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8490
8491 case SUBREG:
c3c2cb37
RK
8492 /* If this is a SUBREG for a promoted object that is sign-extended
8493 and we are looking at it in a wider mode, we know that at least the
8494 high-order bits are known to be sign bit copies. */
8495
8496 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
770ae6cc
RK
8497 {
8498 num0 = num_sign_bit_copies (SUBREG_REG (x), mode);
8499 return MAX ((int) bitwidth
8500 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8501 num0);
8502 }
663522cb 8503
0f41302f 8504 /* For a smaller object, just ignore the high bits. */
d0ab8cd3
RK
8505 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8506 {
8507 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
8508 return MAX (1, (num0
770ae6cc
RK
8509 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8510 - bitwidth)));
d0ab8cd3 8511 }
457816e2 8512
8baf60bb 8513#ifdef WORD_REGISTER_OPERATIONS
2aec5b7a 8514#ifdef LOAD_EXTEND_OP
8baf60bb
RK
8515 /* For paradoxical SUBREGs on machines where all register operations
8516 affect the entire register, just look inside. Note that we are
8517 passing MODE to the recursive call, so the number of sign bit copies
8518 will remain relative to that mode, not the inner mode. */
457816e2 8519
2aec5b7a
JW
8520 /* This works only if loads sign extend. Otherwise, if we get a
8521 reload for the inner part, it may be loaded from the stack, and
8522 then we lose all sign bit copies that existed before the store
8523 to the stack. */
8524
8525 if ((GET_MODE_SIZE (GET_MODE (x))
8526 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8527 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
457816e2 8528 return num_sign_bit_copies (SUBREG_REG (x), mode);
2aec5b7a 8529#endif
457816e2 8530#endif
d0ab8cd3
RK
8531 break;
8532
8533 case SIGN_EXTRACT:
8534 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
770ae6cc 8535 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
d0ab8cd3
RK
8536 break;
8537
663522cb 8538 case SIGN_EXTEND:
d0ab8cd3
RK
8539 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8540 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
8541
8542 case TRUNCATE:
0f41302f 8543 /* For a smaller object, just ignore the high bits. */
d0ab8cd3 8544 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
770ae6cc
RK
8545 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8546 - bitwidth)));
d0ab8cd3
RK
8547
8548 case NOT:
8549 return num_sign_bit_copies (XEXP (x, 0), mode);
8550
8551 case ROTATE: case ROTATERT:
8552 /* If we are rotating left by a number of bits less than the number
8553 of sign bit copies, we can just subtract that amount from the
8554 number. */
8555 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8556 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
8557 {
8558 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8559 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
770ae6cc 8560 : (int) bitwidth - INTVAL (XEXP (x, 1))));
d0ab8cd3
RK
8561 }
8562 break;
8563
8564 case NEG:
8565 /* In general, this subtracts one sign bit copy. But if the value
8566 is known to be positive, the number of sign bit copies is the
951553af
RK
8567 same as that of the input. Finally, if the input has just one bit
8568 that might be nonzero, all the bits are copies of the sign bit. */
70186b34
BS
8569 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8570 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8571 return num0 > 1 ? num0 - 1 : 1;
8572
951553af
RK
8573 nonzero = nonzero_bits (XEXP (x, 0), mode);
8574 if (nonzero == 1)
d0ab8cd3
RK
8575 return bitwidth;
8576
d0ab8cd3 8577 if (num0 > 1
951553af 8578 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d0ab8cd3
RK
8579 num0--;
8580
8581 return num0;
8582
8583 case IOR: case AND: case XOR:
8584 case SMIN: case SMAX: case UMIN: case UMAX:
8585 /* Logical operations will preserve the number of sign-bit copies.
8586 MIN and MAX operations always return one of the operands. */
8587 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8588 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8589 return MIN (num0, num1);
8590
8591 case PLUS: case MINUS:
8592 /* For addition and subtraction, we can have a 1-bit carry. However,
8593 if we are subtracting 1 from a positive number, there will not
8594 be such a carry. Furthermore, if the positive number is known to
8595 be 0 or 1, we know the result is either -1 or 0. */
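      /* For example, two operands with 10 sign-bit copies each yield a
	 sum or difference with at least 9, while adding -1 to a value
	 known to be 0 or 1 gives -1 or 0, where every bit copies the
	 sign bit.  */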
8596
3e3ea975 8597 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 8598 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 8599 {
951553af
RK
8600 nonzero = nonzero_bits (XEXP (x, 0), mode);
8601 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8602 return (nonzero == 1 || nonzero == 0 ? bitwidth
8603 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8604 }
8605
8606 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8607 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
d1405722
RK
8608 result = MAX (1, MIN (num0, num1) - 1);
8609
8610#ifdef POINTERS_EXTEND_UNSIGNED
8611 /* If pointers extend signed and this is an addition or subtraction
8612 to a pointer in Pmode, all the bits above ptr_mode are known to be
8613 sign bit copies. */
8614 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8615 && (code == PLUS || code == MINUS)
8616 && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8617 result = MAX ((GET_MODE_BITSIZE (Pmode)
8618 - GET_MODE_BITSIZE (ptr_mode) + 1),
8619 result);
8620#endif
8621 return result;
663522cb 8622
d0ab8cd3
RK
8623 case MULT:
8624 /* The number of bits of the product is the sum of the number of
8625 bits of both terms. However, unless one of the terms is known
8626 to be positive, we must allow for an additional bit since negating
8627 a negative number can remove one sign bit copy. */
8628
8629 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8630 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8631
8632 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8633 if (result > 0
70186b34
BS
8634 && (bitwidth > HOST_BITS_PER_WIDE_INT
8635 || (((nonzero_bits (XEXP (x, 0), mode)
8636 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8637 && ((nonzero_bits (XEXP (x, 1), mode)
8638 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
d0ab8cd3
RK
8639 result--;
8640
8641 return MAX (1, result);
8642
8643 case UDIV:
70186b34
BS
8644 /* The result must be <= the first operand. If the first operand
8645 has the high bit set, we know nothing about the number of sign
8646 bit copies. */
8647 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8648 return 1;
8649 else if ((nonzero_bits (XEXP (x, 0), mode)
8650 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8651 return 1;
8652 else
8653 return num_sign_bit_copies (XEXP (x, 0), mode);
663522cb 8654
d0ab8cd3
RK
8655 case UMOD:
8656 /* The result must be <= the second operand. */
8657 return num_sign_bit_copies (XEXP (x, 1), mode);
8658
8659 case DIV:
8660 /* Similar to unsigned division, except that we have to worry about
8661 the case where the divisor is negative, in which case we have
8662 to add 1. */
8663 result = num_sign_bit_copies (XEXP (x, 0), mode);
8664 if (result > 1
70186b34
BS
8665 && (bitwidth > HOST_BITS_PER_WIDE_INT
8666 || (nonzero_bits (XEXP (x, 1), mode)
8667 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8668 result--;
d0ab8cd3
RK
8669
8670 return result;
8671
8672 case MOD:
8673 result = num_sign_bit_copies (XEXP (x, 1), mode);
8674 if (result > 1
70186b34
BS
8675 && (bitwidth > HOST_BITS_PER_WIDE_INT
8676 || (nonzero_bits (XEXP (x, 1), mode)
8677 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8678 result--;
d0ab8cd3
RK
8679
8680 return result;
8681
8682 case ASHIFTRT:
8683 /* Shifts by a constant add to the number of bits equal to the
8684 sign bit. */
8685 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8686 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8687 && INTVAL (XEXP (x, 1)) > 0)
8688 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
8689
8690 return num0;
8691
8692 case ASHIFT:
d0ab8cd3
RK
8693 /* Left shifts destroy copies. */
8694 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8695 || INTVAL (XEXP (x, 1)) < 0
8696 || INTVAL (XEXP (x, 1)) >= bitwidth)
8697 return 1;
8698
8699 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8700 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8701
8702 case IF_THEN_ELSE:
8703 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8704 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8705 return MIN (num0, num1);
8706
d0ab8cd3 8707 case EQ: case NE: case GE: case GT: case LE: case LT:
69bc0a1f 8708 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
d0ab8cd3 8709 case GEU: case GTU: case LEU: case LTU:
69bc0a1f
JH
8710 case UNORDERED: case ORDERED:
8711 /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
8712 Then see how many zero bits we have. */
8713 nonzero = STORE_FLAG_VALUE;
8714 if (bitwidth <= HOST_BITS_PER_WIDE_INT
8715 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8716 nonzero = (~nonzero) & GET_MODE_MASK (mode);
8717
8718 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
e9a25f70 8719 break;
663522cb 8720
e9a25f70
JL
8721 default:
8722 break;
d0ab8cd3
RK
8723 }
8724
8725 /* If we haven't been able to figure it out by one of the above rules,
8726 see if some of the high-order bits are known to be zero. If so,
ac49a949
RS
8727 count those bits and return one less than that amount. If we can't
8728 safely compute the mask for this mode, always return 1. */
8729
8730 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 8731 return 1;
d0ab8cd3 8732
951553af 8733 nonzero = nonzero_bits (x, mode);
df6f4086 8734 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
951553af 8735 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8736}
8737\f
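/* Added illustration (not part of the original source): for X in SImode
 with nonzero_bits (X, SImode) == 0xff, the fallback above yields
 32 - floor_log2 (0xff) - 1 == 24 sign bit copies, because the 24
 high-order bits are known to be zero. */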
1a26b032
RK
8738/* Return the number of "extended" bits there are in X, when interpreted
8739 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8740 unsigned quantities, this is the number of high-order zero bits.
8741 For signed quantities, this is the number of copies of the sign bit
8742 minus 1. In both cases, this function returns the number of "spare"
8743 bits. For example, if two quantities for which this function returns
8744 at least 1 are added, the addition is known not to overflow.
8745
8746 This function will always return 0 unless called during combine, which
8747 implies that it must be called from a define_split. */
8748
770ae6cc 8749unsigned int
1a26b032
RK
8750extended_count (x, mode, unsignedp)
8751 rtx x;
8752 enum machine_mode mode;
8753 int unsignedp;
8754{
951553af 8755 if (nonzero_sign_valid == 0)
1a26b032
RK
8756 return 0;
8757
8758 return (unsignedp
ac49a949 8759 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
770ae6cc
RK
8760 ? (GET_MODE_BITSIZE (mode) - 1
8761 - floor_log2 (nonzero_bits (x, mode)))
8762 : 0)
1a26b032
RK
8763 : num_sign_bit_copies (x, mode) - 1);
8764}
8765\f
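/* Usage sketch (added illustration; reg1 and reg2 are hypothetical
 pseudo-register rtx values, and this only works during combine,
 when nonzero_sign_valid is set):

 if (extended_count (reg1, SImode, 1) >= 1
 && extended_count (reg2, SImode, 1) >= 1)
 ... (plus:SI reg1 reg2) is known not to overflow ...

 For example, an unsigned value whose nonzero_bits is 0xff has
 31 - floor_log2 (0xff) == 24 spare high-order bits. */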
230d793d
RS
8766/* This function is called from `simplify_shift_const' to merge two
8767 outer operations. Specifically, we have already found that we need
8768 to perform operation *POP0 with constant *PCONST0 at the outermost
8769 position. We would now like to also perform OP1 with constant CONST1
8770 (with *POP0 being done last).
8771
8772 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
663522cb 8773 the resulting operation. *PCOMP_P is set to 1 if we would need to
230d793d
RS
8774 complement the innermost operand, otherwise it is unchanged.
8775
8776 MODE is the mode in which the operation will be done. No bits outside
8777 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 8778 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
8779
8780 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
8781 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8782 result is simply *PCONST0.
8783
8784 If the resulting operation cannot be expressed as one operation, we
8785 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8786
8787static int
8788merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8789 enum rtx_code *pop0;
5f4f0e22 8790 HOST_WIDE_INT *pconst0;
230d793d 8791 enum rtx_code op1;
5f4f0e22 8792 HOST_WIDE_INT const1;
230d793d
RS
8793 enum machine_mode mode;
8794 int *pcomp_p;
8795{
8796 enum rtx_code op0 = *pop0;
5f4f0e22 8797 HOST_WIDE_INT const0 = *pconst0;
230d793d
RS
8798
8799 const0 &= GET_MODE_MASK (mode);
8800 const1 &= GET_MODE_MASK (mode);
8801
8802 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8803 if (op0 == AND)
8804 const1 &= const0;
8805
8806 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8807 if OP0 is SET. */
8808
8809 if (op1 == NIL || op0 == SET)
8810 return 1;
8811
8812 else if (op0 == NIL)
8813 op0 = op1, const0 = const1;
8814
8815 else if (op0 == op1)
8816 {
8817 switch (op0)
8818 {
8819 case AND:
8820 const0 &= const1;
8821 break;
8822 case IOR:
8823 const0 |= const1;
8824 break;
8825 case XOR:
8826 const0 ^= const1;
8827 break;
8828 case PLUS:
8829 const0 += const1;
8830 break;
8831 case NEG:
8832 op0 = NIL;
8833 break;
e9a25f70
JL
8834 default:
8835 break;
230d793d
RS
8836 }
8837 }
8838
8839 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8840 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8841 return 0;
8842
8843 /* If the two constants aren't the same, we can't do anything. The
8844 remaining six cases can all be done. */
8845 else if (const0 != const1)
8846 return 0;
8847
8848 else
8849 switch (op0)
8850 {
8851 case IOR:
8852 if (op1 == AND)
8853 /* (a & b) | b == b */
8854 op0 = SET;
8855 else /* op1 == XOR */
8856 /* (a ^ b) | b == a | b */
b729186a 8857 {;}
230d793d
RS
8858 break;
8859
8860 case XOR:
8861 if (op1 == AND)
8862 /* (a & b) ^ b == (~a) & b */
8863 op0 = AND, *pcomp_p = 1;
8864 else /* op1 == IOR */
8865 /* (a | b) ^ b == a & ~b */
663522cb 8866 op0 = AND, *pconst0 = ~const0;
230d793d
RS
8867 break;
8868
8869 case AND:
8870 if (op1 == IOR)
8871 /* (a | b) & b == b */
8872 op0 = SET;
8873 else /* op1 == XOR */
8874 /* (a ^ b) & b == (~a) & b */
8875 *pcomp_p = 1;
8876 break;
e9a25f70
JL
8877 default:
8878 break;
230d793d
RS
8879 }
8880
8881 /* Check for NO-OP cases. */
8882 const0 &= GET_MODE_MASK (mode);
8883 if (const0 == 0
8884 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8885 op0 = NIL;
8886 else if (const0 == 0 && op0 == AND)
8887 op0 = SET;
e51712db
KG
8888 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8889 && op0 == AND)
230d793d
RS
8890 op0 = NIL;
8891
7e4ce834
RH
8892 /* ??? Slightly redundant with the above mask, but not entirely.
8893 Moving this above means we'd have to sign-extend the mode mask
8894 for the final test. */
8895 const0 = trunc_int_for_mode (const0, mode);
9fa6d012 8896
230d793d
RS
8897 *pop0 = op0;
8898 *pconst0 = const0;
8899
8900 return 1;
8901}
8902\f
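/* Worked check of one of the identities above (added illustration,
 not part of the original source): take a == 5, b == 3. Then

 (a & b) ^ b == (5 & 3) ^ 3 == 1 ^ 3 == 2
 (~a) & b == ~5 & 3 == 2

 so when *POP0 is XOR and OP1 is AND with equal constants, the merge
 sets *POP0 to AND and *PCOMP_P to 1, as in the switch above. */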
8903/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8904 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8905 that we started with.
8906
8907 The shift is normally computed in the widest mode we find in VAROP, as
8908 long as it isn't a different number of words than RESULT_MODE. Exceptions
8909 are ASHIFTRT and ROTATE, which are always done in their original mode. */
8910
8911static rtx
770ae6cc 8912simplify_shift_const (x, code, result_mode, varop, input_count)
230d793d
RS
8913 rtx x;
8914 enum rtx_code code;
8915 enum machine_mode result_mode;
8916 rtx varop;
770ae6cc 8917 int input_count;
230d793d
RS
8918{
8919 enum rtx_code orig_code = code;
770ae6cc
RK
8920 int orig_count = input_count;
8921 unsigned int count;
8922 int signed_count;
230d793d
RS
8923 enum machine_mode mode = result_mode;
8924 enum machine_mode shift_mode, tmode;
770ae6cc 8925 unsigned int mode_words
230d793d
RS
8926 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8927 /* We form (outer_op (code varop count) (outer_const)). */
8928 enum rtx_code outer_op = NIL;
c4e861e8 8929 HOST_WIDE_INT outer_const = 0;
230d793d
RS
8930 rtx const_rtx;
8931 int complement_p = 0;
8932 rtx new;
8933
8934 /* If we were given an invalid count, don't do anything except exactly
8935 what was requested. */
8936
770ae6cc 8937 if (input_count < 0 || input_count > (int) GET_MODE_BITSIZE (mode))
230d793d
RS
8938 {
8939 if (x)
8940 return x;
8941
770ae6cc 8942 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (input_count));
230d793d
RS
8943 }
8944
770ae6cc
RK
8945 count = input_count;
8946
853d8828
RH
8947 /* Make sure to truncate the "natural" shift on the way in. We don't
8948 want to do this inside the loop as it makes it more difficult to
8949 combine shifts. */
8950#ifdef SHIFT_COUNT_TRUNCATED
8951 if (SHIFT_COUNT_TRUNCATED)
8952 count %= GET_MODE_BITSIZE (mode);
8953#endif
8954
230d793d
RS
8955 /* Unless one of the branches of the `if' in this loop does a `continue',
8956 we will `break' the loop after the `if'. */
8957
8958 while (count != 0)
8959 {
8960 /* If we have an operand of (clobber (const_int 0)), just return that
8961 value. */
8962 if (GET_CODE (varop) == CLOBBER)
8963 return varop;
8964
8965 /* If we discovered we had to complement VAROP, leave. Making a NOT
8966 here would cause an infinite loop. */
8967 if (complement_p)
8968 break;
8969
abc95ed3 8970 /* Convert ROTATERT to ROTATE. */
230d793d
RS
8971 if (code == ROTATERT)
8972 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8973
230d793d 8974 /* We need to determine what mode we will do the shift in. If the
f6789c77
RK
8975 shift is a right shift or a ROTATE, we must always do it in the mode
8976 it was originally done in. Otherwise, we can do it in MODE, the
0f41302f 8977 widest mode encountered. */
f6789c77
RK
8978 shift_mode
8979 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8980 ? result_mode : mode);
230d793d
RS
8981
8982 /* Handle cases where the count is greater than the size of the mode
853d8828
RH
8983 minus 1. For ASHIFT, use the size minus one as the count (this can
8984 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8985 take the count modulo the size. For other shifts, the result is
8986 zero.
230d793d
RS
8987
8988 Since these shifts are being produced by the compiler by combining
8989 multiple operations, each of which are defined, we know what the
8990 result is supposed to be. */
663522cb 8991
230d793d
RS
8992 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8993 {
8994 if (code == ASHIFTRT)
8995 count = GET_MODE_BITSIZE (shift_mode) - 1;
8996 else if (code == ROTATE || code == ROTATERT)
8997 count %= GET_MODE_BITSIZE (shift_mode);
8998 else
8999 {
9000 /* We can't simply return zero because there may be an
9001 outer op. */
9002 varop = const0_rtx;
9003 count = 0;
9004 break;
9005 }
9006 }
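 /* Added illustration (not part of the original source), for SImode
 (32 bits) and count == 40:

 (ashiftrt X 40) is handled as (ashiftrt X 31),
 (rotate X 40) becomes (rotate X 8),
 (lshiftrt X 40) makes VAROP const0_rtx, leaving only a
 possible outer operation to apply. */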
9007
312def2e
RK
9008 /* An arithmetic right shift of a quantity known to be -1 or 0
9009 is a no-op. */
9010 if (code == ASHIFTRT
9011 && (num_sign_bit_copies (varop, shift_mode)
9012 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 9013 {
312def2e
RK
9014 count = 0;
9015 break;
9016 }
d0ab8cd3 9017
312def2e
RK
9018 /* If we are doing an arithmetic right shift and discarding all but
9019 the sign bit copies, this is equivalent to doing a shift by the
9020 bitsize minus one. Convert it into that shift because it will often
9021 allow other simplifications. */
500c518b 9022
312def2e
RK
9023 if (code == ASHIFTRT
9024 && (count + num_sign_bit_copies (varop, shift_mode)
9025 >= GET_MODE_BITSIZE (shift_mode)))
9026 count = GET_MODE_BITSIZE (shift_mode) - 1;
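 /* Added illustration (not part of the original source): in SImode,
 if X has 30 sign bit copies, then (ashiftrt X 5) keeps nothing
 but copies of the sign bit (5 + 30 >= 32), so it is rewritten as
 (ashiftrt X 31), a form other rules simplify further. */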
500c518b 9027
230d793d
RS
9028 /* We simplify the tests below and elsewhere by converting
9029 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9030 `make_compound_operation' will convert it to an ASHIFTRT for
9031 those machines (such as VAX) that don't have an LSHIFTRT. */
5f4f0e22 9032 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 9033 && code == ASHIFTRT
951553af 9034 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
9035 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9036 == 0))
230d793d
RS
9037 code = LSHIFTRT;
9038
9039 switch (GET_CODE (varop))
9040 {
9041 case SIGN_EXTEND:
9042 case ZERO_EXTEND:
9043 case SIGN_EXTRACT:
9044 case ZERO_EXTRACT:
9045 new = expand_compound_operation (varop);
9046 if (new != varop)
9047 {
9048 varop = new;
9049 continue;
9050 }
9051 break;
9052
9053 case MEM:
9054 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9055 minus the width of a smaller mode, we can do this with a
9056 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
9057 if ((code == ASHIFTRT || code == LSHIFTRT)
9058 && ! mode_dependent_address_p (XEXP (varop, 0))
9059 && ! MEM_VOLATILE_P (varop)
9060 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9061 MODE_INT, 1)) != BLKmode)
9062 {
f76b9db2 9063 if (BYTES_BIG_ENDIAN)
38a448ca 9064 new = gen_rtx_MEM (tmode, XEXP (varop, 0));
f76b9db2 9065 else
38a448ca
RH
9066 new = gen_rtx_MEM (tmode,
9067 plus_constant (XEXP (varop, 0),
9068 count / BITS_PER_UNIT));
bf49b139 9069
c6df88cb 9070 MEM_COPY_ATTRIBUTES (new, varop);
230d793d
RS
9071 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
9072 : ZERO_EXTEND, mode, new);
9073 count = 0;
9074 continue;
9075 }
9076 break;
9077
9078 case USE:
9079 /* Similar to the case above, except that we can only do this if
9080 the resulting mode is the same as that of the underlying
9081 MEM and adjust the address depending on the *bits* endianness
9082 because of the way that bit-field extract insns are defined. */
9083 if ((code == ASHIFTRT || code == LSHIFTRT)
9084 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9085 MODE_INT, 1)) != BLKmode
9086 && tmode == GET_MODE (XEXP (varop, 0)))
9087 {
f76b9db2
ILT
9088 if (BITS_BIG_ENDIAN)
9089 new = XEXP (varop, 0);
9090 else
9091 {
9092 new = copy_rtx (XEXP (varop, 0));
663522cb 9093 SUBST (XEXP (new, 0),
f76b9db2
ILT
9094 plus_constant (XEXP (new, 0),
9095 count / BITS_PER_UNIT));
9096 }
230d793d
RS
9097
9098 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
9099 : ZERO_EXTEND, mode, new);
9100 count = 0;
9101 continue;
9102 }
9103 break;
9104
9105 case SUBREG:
9106 /* If VAROP is a SUBREG, strip it as long as the inner operand has
9107 the same number of words as what we've seen so far. Then store
9108 the widest mode in MODE. */
f9e67232
RS
9109 if (subreg_lowpart_p (varop)
9110 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9111 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
9112 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9113 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9114 == mode_words))
9115 {
9116 varop = SUBREG_REG (varop);
9117 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9118 mode = GET_MODE (varop);
9119 continue;
9120 }
9121 break;
9122
9123 case MULT:
9124 /* Some machines use MULT instead of ASHIFT because MULT
9125 is cheaper. But it is still better on those machines to
9126 merge two shifts into one. */
9127 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9128 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9129 {
770ae6cc
RK
9130 varop
9131 = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9132 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
9133 continue;
9134 }
9135 break;
9136
9137 case UDIV:
9138 /* Similar, for when divides are cheaper. */
9139 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9140 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9141 {
770ae6cc
RK
9142 varop
9143 = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9144 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
9145 continue;
9146 }
9147 break;
9148
9149 case ASHIFTRT:
8f8d8d6e
AO
9150 /* If we are extracting just the sign bit of an arithmetic
9151 right shift, that shift is not needed. However, the sign
9152 bit of a wider mode may be different from what would be
9153 interpreted as the sign bit in a narrower mode, so, if
9154 the result is narrower, don't discard the shift. */
9155 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9156 && (GET_MODE_BITSIZE (result_mode)
9157 >= GET_MODE_BITSIZE (GET_MODE (varop))))
230d793d
RS
9158 {
9159 varop = XEXP (varop, 0);
9160 continue;
9161 }
9162
0f41302f 9163 /* ... fall through ... */
230d793d
RS
9164
9165 case LSHIFTRT:
9166 case ASHIFT:
230d793d
RS
9167 case ROTATE:
9168 /* Here we have two nested shifts. The result is usually the
9169 AND of a new shift with a mask. We compute the result below. */
9170 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9171 && INTVAL (XEXP (varop, 1)) >= 0
9172 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
9173 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9174 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
9175 {
9176 enum rtx_code first_code = GET_CODE (varop);
770ae6cc 9177 unsigned int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 9178 unsigned HOST_WIDE_INT mask;
230d793d 9179 rtx mask_rtx;
230d793d 9180
230d793d
RS
9181 /* We have one common special case. We can't do any merging if
9182 the inner code is an ASHIFTRT of a smaller mode. However, if
9183 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9184 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9185 we can convert it to
9186 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9187 This simplifies certain SIGN_EXTEND operations. */
9188 if (code == ASHIFT && first_code == ASHIFTRT
9189 && (GET_MODE_BITSIZE (result_mode)
9190 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
9191 {
9192 /* C3 has the low-order C1 bits zero. */
663522cb 9193
5f4f0e22 9194 mask = (GET_MODE_MASK (mode)
663522cb 9195 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 9196
5f4f0e22 9197 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 9198 XEXP (varop, 0), mask);
5f4f0e22 9199 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
9200 varop, count);
9201 count = first_count;
9202 code = ASHIFTRT;
9203 continue;
9204 }
663522cb 9205
d0ab8cd3
RK
9206 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9207 than C1 high-order bits equal to the sign bit, we can convert
9208 this to either an ASHIFT or an ASHIFTRT depending on the
663522cb 9209 two counts.
230d793d
RS
9210
9211 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9212
9213 if (code == ASHIFTRT && first_code == ASHIFT
9214 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
9215 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9216 > first_count))
230d793d 9217 {
d0ab8cd3 9218 varop = XEXP (varop, 0);
770ae6cc
RK
9219
9220 signed_count = count - first_count;
9221 if (signed_count < 0)
663522cb 9222 count = -signed_count, code = ASHIFT;
770ae6cc
RK
9223 else
9224 count = signed_count;
9225
d0ab8cd3 9226 continue;
230d793d
RS
9227 }
9228
9229 /* There are some cases we can't do. If CODE is ASHIFTRT,
9230 we can only do this if FIRST_CODE is also ASHIFTRT.
9231
9232 We can't do the case when CODE is ROTATE and FIRST_CODE is
9233 ASHIFTRT.
9234
9235 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 9236 we can't do this if either shift is a right shift or ROTATE.
230d793d
RS
9237
9238 Finally, we can't do any of these if the mode is too wide
9239 unless the codes are the same.
9240
9241 Handle the case where the shift codes are the same
9242 first. */
9243
9244 if (code == first_code)
9245 {
9246 if (GET_MODE (varop) != result_mode
bdaae9a0
RK
9247 && (code == ASHIFTRT || code == LSHIFTRT
9248 || code == ROTATE))
230d793d
RS
9249 break;
9250
9251 count += first_count;
9252 varop = XEXP (varop, 0);
9253 continue;
9254 }
9255
9256 if (code == ASHIFTRT
9257 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 9258 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 9259 || (GET_MODE (varop) != result_mode
bdaae9a0
RK
9260 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9261 || first_code == ROTATE
230d793d
RS
9262 || code == ROTATE)))
9263 break;
9264
9265 /* To compute the mask to apply after the shift, shift the
663522cb 9266 nonzero bits of the inner shift the same way the
230d793d
RS
9267 outer shift will. */
9268
951553af 9269 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
9270
9271 mask_rtx
9272 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 9273 GEN_INT (count));
663522cb 9274
230d793d
RS
9275 /* Give up if we can't compute an outer operation to use. */
9276 if (mask_rtx == 0
9277 || GET_CODE (mask_rtx) != CONST_INT
9278 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9279 INTVAL (mask_rtx),
9280 result_mode, &complement_p))
9281 break;
9282
9283 /* If the shifts are in the same direction, we add the
9284 counts. Otherwise, we subtract them. */
770ae6cc 9285 signed_count = count;
230d793d
RS
9286 if ((code == ASHIFTRT || code == LSHIFTRT)
9287 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
770ae6cc 9288 signed_count += first_count;
230d793d 9289 else
770ae6cc 9290 signed_count -= first_count;
230d793d 9291
663522cb 9292 /* If COUNT is positive, the new shift is usually CODE,
230d793d
RS
9293 except for the two exceptions below, in which case it is
9294 FIRST_CODE. If the count is negative, FIRST_CODE should
9295 always be used. */
770ae6cc 9296 if (signed_count > 0
230d793d
RS
9297 && ((first_code == ROTATE && code == ASHIFT)
9298 || (first_code == ASHIFTRT && code == LSHIFTRT)))
770ae6cc
RK
9299 code = first_code, count = signed_count;
9300 else if (signed_count < 0)
663522cb 9301 code = first_code, count = -signed_count;
770ae6cc
RK
9302 else
9303 count = signed_count;
230d793d
RS
9304
9305 varop = XEXP (varop, 0);
9306 continue;
9307 }
9308
9309 /* If we have (A << B << C) for any shift, we can convert this to
9310 (A << C << B). This wins if A is a constant. Only try this if
9311 B is not a constant. */
9312
9313 else if (GET_CODE (varop) == code
9314 && GET_CODE (XEXP (varop, 1)) != CONST_INT
9315 && 0 != (new
9316 = simplify_binary_operation (code, mode,
9317 XEXP (varop, 0),
5f4f0e22 9318 GEN_INT (count))))
230d793d
RS
9319 {
9320 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
9321 count = 0;
9322 continue;
9323 }
9324 break;
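 /* Added illustration (not part of the original source): with
 A == 3 and C == 2, (ashift (ashift 3 B) 2) is rewritten as
 (ashift (ashift 3 2) B) == (ashift 12 B); the inner shift of
 the constant folds immediately. */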
9325
9326 case NOT:
9327 /* Make this fit the case below. */
9328 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 9329 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
9330 continue;
9331
9332 case IOR:
9333 case AND:
9334 case XOR:
9335 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9336 with C the size of VAROP - 1 and the shift is logical if
9337 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9338 we have an (le X 0) operation. If we have an arithmetic shift
9339 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9340 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9341
9342 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9343 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9344 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9345 && (code == LSHIFTRT || code == ASHIFTRT)
9346 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9347 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9348 {
9349 count = 0;
9350 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
9351 const0_rtx);
9352
9353 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9354 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
9355
9356 continue;
9357 }
9358
9359 /* If we have (shift (logical)), move the logical to the outside
9360 to allow it to possibly combine with another logical and the
9361 shift to combine with another shift. This also canonicalizes to
9362 what a ZERO_EXTRACT looks like. Also, some machines have
9363 (and (shift)) insns. */
9364
9365 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9366 && (new = simplify_binary_operation (code, result_mode,
9367 XEXP (varop, 1),
5f4f0e22 9368 GEN_INT (count))) != 0
663522cb 9369 && GET_CODE (new) == CONST_INT
230d793d
RS
9370 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9371 INTVAL (new), result_mode, &complement_p))
9372 {
9373 varop = XEXP (varop, 0);
9374 continue;
9375 }
9376
9377 /* If we can't do that, try to simplify the shift in each arm of the
9378 logical expression, make a new logical expression, and apply
9379 the inverse distributive law. */
9380 {
00d4ca1c 9381 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 9382 XEXP (varop, 0), count);
00d4ca1c 9383 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
9384 XEXP (varop, 1), count);
9385
21a64bf1 9386 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
230d793d
RS
9387 varop = apply_distributive_law (varop);
9388
9389 count = 0;
9390 }
9391 break;
9392
9393 case EQ:
45620ed4 9394 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 9395 says that the sign bit can be tested, FOO has mode MODE, C is
45620ed4
RK
9396 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9397 that may be nonzero. */
9398 if (code == LSHIFTRT
230d793d
RS
9399 && XEXP (varop, 1) == const0_rtx
9400 && GET_MODE (XEXP (varop, 0)) == result_mode
9401 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 9402 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 9403 && ((STORE_FLAG_VALUE
663522cb 9404 & ((HOST_WIDE_INT) 1
770ae6cc 9405 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 9406 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
9407 && merge_outer_ops (&outer_op, &outer_const, XOR,
9408 (HOST_WIDE_INT) 1, result_mode,
9409 &complement_p))
230d793d
RS
9410 {
9411 varop = XEXP (varop, 0);
9412 count = 0;
9413 continue;
9414 }
9415 break;
9416
9417 case NEG:
d0ab8cd3
RK
9418 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9419 than the number of bits in the mode is equivalent to A. */
9420 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 9421 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 9422 {
d0ab8cd3 9423 varop = XEXP (varop, 0);
230d793d
RS
9424 count = 0;
9425 continue;
9426 }
9427
9428 /* NEG commutes with ASHIFT since it is multiplication. Move the
9429 NEG outside to allow shifts to combine. */
9430 if (code == ASHIFT
5f4f0e22
CH
9431 && merge_outer_ops (&outer_op, &outer_const, NEG,
9432 (HOST_WIDE_INT) 0, result_mode,
9433 &complement_p))
230d793d
RS
9434 {
9435 varop = XEXP (varop, 0);
9436 continue;
9437 }
9438 break;
9439
9440 case PLUS:
d0ab8cd3
RK
9441 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9442 is one less than the number of bits in the mode is
9443 equivalent to (xor A 1). */
230d793d
RS
9444 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9445 && XEXP (varop, 1) == constm1_rtx
951553af 9446 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
9447 && merge_outer_ops (&outer_op, &outer_const, XOR,
9448 (HOST_WIDE_INT) 1, result_mode,
9449 &complement_p))
230d793d
RS
9450 {
9451 count = 0;
9452 varop = XEXP (varop, 0);
9453 continue;
9454 }
9455
3f508eca 9456 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 9457 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
9458 bits are known zero in FOO, we can replace the PLUS with FOO.
9459 Similarly in the other operand order. This code occurs when
9460 we are computing the size of a variable-size array. */
9461
9462 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 9463 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
9464 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9465 && (nonzero_bits (XEXP (varop, 1), result_mode)
9466 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
9467 {
9468 varop = XEXP (varop, 0);
9469 continue;
9470 }
9471 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 9472 && count < HOST_BITS_PER_WIDE_INT
ac49a949 9473 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 9474 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 9475 >> count)
951553af
RK
9476 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9477 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
9478 result_mode)))
9479 {
9480 varop = XEXP (varop, 1);
9481 continue;
9482 }
9483
230d793d
RS
9484 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9485 if (code == ASHIFT
9486 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9487 && (new = simplify_binary_operation (ASHIFT, result_mode,
9488 XEXP (varop, 1),
5f4f0e22 9489 GEN_INT (count))) != 0
770ae6cc 9490 && GET_CODE (new) == CONST_INT
230d793d
RS
9491 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9492 INTVAL (new), result_mode, &complement_p))
9493 {
9494 varop = XEXP (varop, 0);
9495 continue;
9496 }
9497 break;
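 /* Added illustration (not part of the original source):
 (ashift (plus X 3) 2) becomes (plus (ashift X 2) 12); the folded
 constant 3 << 2 is recorded as an outer PLUS by merge_outer_ops
 and applied after the shift. */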
9498
9499 case MINUS:
9500 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9501 with C the size of VAROP - 1 and the shift is logical if
9502 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9503 we have a (gt X 0) operation. If the shift is arithmetic with
9504 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9505 we have a (neg (gt X 0)) operation. */
9506
0802d516
RK
9507 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9508 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
230d793d 9509 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
230d793d
RS
9510 && (code == LSHIFTRT || code == ASHIFTRT)
9511 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9512 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9513 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9514 {
9515 count = 0;
9516 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
9517 const0_rtx);
9518
9519 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9520 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
9521
9522 continue;
9523 }
9524 break;
6e0ef100
JC
9525
9526 case TRUNCATE:
9527 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9528 if the truncate does not affect the value. */
9529 if (code == LSHIFTRT
9530 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9531 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9532 && (INTVAL (XEXP (XEXP (varop, 0), 1))
b577a8ff
JL
9533 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9534 - GET_MODE_BITSIZE (GET_MODE (varop)))))
6e0ef100
JC
9535 {
9536 rtx varop_inner = XEXP (varop, 0);
9537
770ae6cc
RK
9538 varop_inner
9539 = gen_rtx_combine (LSHIFTRT, GET_MODE (varop_inner),
9540 XEXP (varop_inner, 0),
9541 GEN_INT (count
9542 + INTVAL (XEXP (varop_inner, 1))));
6e0ef100
JC
9543 varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
9544 varop_inner);
9545 count = 0;
9546 continue;
9547 }
9548 break;
663522cb 9549
e9a25f70
JL
9550 default:
9551 break;
230d793d
RS
9552 }
9553
9554 break;
9555 }
9556
9557 /* We need to determine what mode to do the shift in. If the shift is
f6789c77
RK
9558 a right shift or ROTATE, we must always do it in the mode it was
9559 originally done in. Otherwise, we can do it in MODE, the widest mode
9560 encountered. The code we care about is that of the shift that will
9561 actually be done, not the shift that was originally requested. */
9562 shift_mode
9563 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9564 ? result_mode : mode);
230d793d
RS
9565
9566 /* We have now finished analyzing the shift. The result should be
9567 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9568 OUTER_OP is non-NIL, it is an operation that needs to be applied
9569 to the result of the shift. OUTER_CONST is the relevant constant,
9570 but we must turn off all bits turned off in the shift.
9571
9572 If we were passed a value for X, see if we can use any pieces of
9573 it. If not, make a new rtx. */
9574
9575 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9576 && GET_CODE (XEXP (x, 1)) == CONST_INT
9577 && INTVAL (XEXP (x, 1)) == count)
9578 const_rtx = XEXP (x, 1);
9579 else
5f4f0e22 9580 const_rtx = GEN_INT (count);
230d793d
RS
9581
9582 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9583 && GET_MODE (XEXP (x, 0)) == shift_mode
9584 && SUBREG_REG (XEXP (x, 0)) == varop)
9585 varop = XEXP (x, 0);
9586 else if (GET_MODE (varop) != shift_mode)
9587 varop = gen_lowpart_for_combine (shift_mode, varop);
9588
0f41302f 9589 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
9590 if (GET_CODE (varop) == CLOBBER)
9591 return x ? x : varop;
9592
9593 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9594 if (new != 0)
9595 x = new;
9596 else
9597 {
9598 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
9599 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
9600
9601 SUBST (XEXP (x, 0), varop);
9602 SUBST (XEXP (x, 1), const_rtx);
9603 }
9604
224eeff2
RK
9605 /* If we have an outer operation and we just made a shift, it is
9606 possible that we could have simplified the shift were it not
9607 for the outer operation. So try to do the simplification
9608 recursively. */
9609
9610 if (outer_op != NIL && GET_CODE (x) == code
9611 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9612 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9613 INTVAL (XEXP (x, 1)));
9614
230d793d
RS
9615 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9616 turn off all the bits that the shift would have turned off. */
9617 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 9618 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d 9619 GET_MODE_MASK (result_mode) >> orig_count);
663522cb 9620
230d793d
RS
9621 /* Do the remainder of the processing in RESULT_MODE. */
9622 x = gen_lowpart_for_combine (result_mode, x);
9623
9624 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9625 operation. */
9626 if (complement_p)
0c1c8ea6 9627 x = gen_unary (NOT, result_mode, result_mode, x);
230d793d
RS
9628
9629 if (outer_op != NIL)
9630 {
5f4f0e22 9631 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7e4ce834 9632 outer_const = trunc_int_for_mode (outer_const, result_mode);
230d793d
RS
9633
9634 if (outer_op == AND)
5f4f0e22 9635 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
9636 else if (outer_op == SET)
9637 /* This means that we have determined that the result is
9638 equivalent to a constant. This should be rare. */
5f4f0e22 9639 x = GEN_INT (outer_const);
230d793d 9640 else if (GET_RTX_CLASS (outer_op) == '1')
0c1c8ea6 9641 x = gen_unary (outer_op, result_mode, result_mode, x);
230d793d 9642 else
5f4f0e22 9643 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
9644 }
9645
9646 return x;
663522cb 9647}
230d793d
RS
9648\f
9649/* Like recog, but we receive the address of a pointer to a new pattern.
9650 We try to match the rtx that the pointer points to.
9651 If that fails, we may try to modify or replace the pattern,
9652 storing the replacement into the same pointer object.
9653
9654 Modifications include deletion or addition of CLOBBERs.
9655
9656 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9657 the CLOBBERs are placed.
9658
9659 The value is the final insn code from the pattern ultimately matched,
9660 or -1. */
9661
9662static int
8e2f6e35 9663recog_for_combine (pnewpat, insn, pnotes)
230d793d
RS
9664 rtx *pnewpat;
9665 rtx insn;
9666 rtx *pnotes;
9667{
9668 register rtx pat = *pnewpat;
9669 int insn_code_number;
9670 int num_clobbers_to_add = 0;
9671 int i;
9672 rtx notes = 0;
c1194d74 9673 rtx old_notes;
230d793d 9674
974f4146
RK
9675 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9676 we use to indicate that something didn't match. If we find such a
9677 thing, force rejection. */
d96023cf 9678 if (GET_CODE (pat) == PARALLEL)
974f4146 9679 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
d96023cf
RK
9680 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9681 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
974f4146
RK
9682 return -1;
9683
c1194d74
JW
9684 /* Remove the old notes prior to trying to recognize the new pattern. */
9685 old_notes = REG_NOTES (insn);
9686 REG_NOTES (insn) = 0;
9687
230d793d
RS
9688 /* Is the result of combination a valid instruction? */
9689 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9690
9691 /* If it isn't, there is the possibility that we previously had an insn
9692 that clobbered some register as a side effect, but the combined
9693 insn doesn't need to do that. So try once more without the clobbers
9694 unless this represents an ASM insn. */
9695
9696 if (insn_code_number < 0 && ! check_asm_operands (pat)
9697 && GET_CODE (pat) == PARALLEL)
9698 {
9699 int pos;
9700
9701 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9702 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9703 {
9704 if (i != pos)
9705 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9706 pos++;
9707 }
9708
9709 SUBST_INT (XVECLEN (pat, 0), pos);
9710
9711 if (pos == 1)
9712 pat = XVECEXP (pat, 0, 0);
9713
9714 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9715 }
9716
c1194d74
JW
9717 REG_NOTES (insn) = old_notes;
9718
230d793d
RS
9719 /* If we had any clobbers to add, make a new pattern that contains
9720 them. Then check to make sure that all of them are dead. */
9721 if (num_clobbers_to_add)
9722 {
38a448ca 9723 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
bf103ec2
R
9724 rtvec_alloc (GET_CODE (pat) == PARALLEL
9725 ? (XVECLEN (pat, 0)
9726 + num_clobbers_to_add)
9727 : num_clobbers_to_add + 1));
230d793d
RS
9728
9729 if (GET_CODE (pat) == PARALLEL)
9730 for (i = 0; i < XVECLEN (pat, 0); i++)
9731 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9732 else
9733 XVECEXP (newpat, 0, 0) = pat;
9734
9735 add_clobbers (newpat, insn_code_number);
9736
9737 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9738 i < XVECLEN (newpat, 0); i++)
9739 {
9740 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9741 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9742 return -1;
38a448ca
RH
9743 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9744 XEXP (XVECEXP (newpat, 0, i), 0), notes);
230d793d
RS
9745 }
9746 pat = newpat;
9747 }
9748
9749 *pnewpat = pat;
9750 *pnotes = notes;
9751
9752 return insn_code_number;
9753}
9754\f
9755/* Like gen_lowpart but for use by combine. In combine it is not possible
9756 to create any new pseudoregs. However, it is safe to create
9757 invalid memory addresses, because combine will try to recognize
9758 them and all they will do is make the combine attempt fail.
9759
9760 If for some reason this cannot do its job, an rtx
9761 (clobber (const_int 0)) is returned.
9762 An insn containing that will not be recognized. */
9763
9764#undef gen_lowpart
9765
9766static rtx
9767gen_lowpart_for_combine (mode, x)
9768 enum machine_mode mode;
9769 register rtx x;
9770{
9771 rtx result;
9772
9773 if (GET_MODE (x) == mode)
9774 return x;
9775
eae957a8
RK
9776 /* We can only support MODE being wider than a word if X is a
9777 constant integer or has a mode the same size. */
9778
9779 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9780 && ! ((GET_MODE (x) == VOIDmode
9781 && (GET_CODE (x) == CONST_INT
9782 || GET_CODE (x) == CONST_DOUBLE))
9783 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
38a448ca 9784 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9785
9786 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9787 won't know what to do. So we will strip off the SUBREG here and
9788 process normally. */
9789 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9790 {
9791 x = SUBREG_REG (x);
9792 if (GET_MODE (x) == mode)
9793 return x;
9794 }
9795
9796 result = gen_lowpart_common (mode, x);
02188693 9797#ifdef CLASS_CANNOT_CHANGE_MODE
64bf47a2
RK
9798 if (result != 0
9799 && GET_CODE (result) == SUBREG
9800 && GET_CODE (SUBREG_REG (result)) == REG
9801 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
02188693
RH
9802 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (result),
9803 GET_MODE (SUBREG_REG (result))))
9804 REG_CHANGES_MODE (REGNO (SUBREG_REG (result))) = 1;
9805#endif
64bf47a2 9806
230d793d
RS
9807 if (result)
9808 return result;
9809
9810 if (GET_CODE (x) == MEM)
9811 {
9812 register int offset = 0;
9813 rtx new;
9814
9815 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9816 address. */
9817 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
38a448ca 9818 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9819
9820 /* If we want to refer to something bigger than the original memref,
9821 generate a perverse subreg instead. That will force a reload
9822 of the original memref X. */
9823 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
38a448ca 9824 return gen_rtx_SUBREG (mode, x, 0);
230d793d 9825
f76b9db2
ILT
9826 if (WORDS_BIG_ENDIAN)
9827 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9828 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
c5c76735 9829
f76b9db2
ILT
9830 if (BYTES_BIG_ENDIAN)
9831 {
9832 /* Adjust the address so that the address-after-the-data is
9833 unchanged. */
9834 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9835 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9836 }
38a448ca 9837 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
c6df88cb 9838 MEM_COPY_ATTRIBUTES (new, x);
230d793d
RS
9839 return new;
9840 }
9841
9842 /* If X is a comparison operator, rewrite it in a new mode. This
9843 probably won't match, but may allow further simplifications. */
9844 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9845 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9846
9847 /* If we couldn't simplify X any other way, just enclose it in a
9848 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 9849 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 9850 else
dfbe1b2f
RK
9851 {
9852 int word = 0;
9853
9854 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
9855 word = ((GET_MODE_SIZE (GET_MODE (x))
9856 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
9857 / UNITS_PER_WORD);
38a448ca 9858 return gen_rtx_SUBREG (mode, x, word);
dfbe1b2f 9859 }
230d793d
RS
9860}
9861\f
9862/* Make an rtx expression. This is a subset of gen_rtx and only supports
9863 expressions of 1, 2, or 3 operands, each of which is an rtx expression.
9864
9865 If the identical expression was previously in the insn (in the undobuf),
9866 it will be returned. Only if it is not found will a new expression
9867 be made. */
9868
9869/*VARARGS2*/
9870static rtx
83d2b3b9 9871gen_rtx_combine VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
230d793d 9872{
5148a72b 9873#ifndef ANSI_PROTOTYPES
230d793d
RS
9874 enum rtx_code code;
9875 enum machine_mode mode;
4f90e4a0
RK
9876#endif
9877 va_list p;
230d793d
RS
9878 int n_args;
9879 rtx args[3];
b729186a 9880 int j;
6f7d635c 9881 const char *fmt;
230d793d 9882 rtx rt;
241cea85 9883 struct undo *undo;
230d793d 9884
4f90e4a0
RK
9885 VA_START (p, mode);
9886
5148a72b 9887#ifndef ANSI_PROTOTYPES
230d793d
RS
9888 code = va_arg (p, enum rtx_code);
9889 mode = va_arg (p, enum machine_mode);
4f90e4a0
RK
9890#endif
9891
230d793d
RS
9892 n_args = GET_RTX_LENGTH (code);
9893 fmt = GET_RTX_FORMAT (code);
9894
9895 if (n_args == 0 || n_args > 3)
9896 abort ();
9897
9898 /* Get each arg and verify that it is supposed to be an expression. */
9899 for (j = 0; j < n_args; j++)
9900 {
9901 if (*fmt++ != 'e')
9902 abort ();
9903
9904 args[j] = va_arg (p, rtx);
9905 }
9906
f0305a2b
KG
9907 va_end (p);
9908
230d793d
RS
9909 /* See if this is in undobuf. Be sure we don't use objects that came
9910 from another insn; this could produce circular rtl structures. */
9911
241cea85
RK
9912 for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9913 if (!undo->is_int
9914 && GET_CODE (undo->old_contents.r) == code
9915 && GET_MODE (undo->old_contents.r) == mode)
230d793d
RS
9916 {
9917 for (j = 0; j < n_args; j++)
241cea85 9918 if (XEXP (undo->old_contents.r, j) != args[j])
230d793d
RS
9919 break;
9920
9921 if (j == n_args)
241cea85 9922 return undo->old_contents.r;
230d793d
RS
9923 }
9924
9925 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
9926 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
9927 rt = rtx_alloc (code);
9928 PUT_MODE (rt, mode);
9929 XEXP (rt, 0) = args[0];
9930 if (n_args > 1)
9931 {
9932 XEXP (rt, 1) = args[1];
9933 if (n_args > 2)
9934 XEXP (rt, 2) = args[2];
9935 }
9936 return rt;
9937}
9938
9939/* These routines make binary and unary operations by first seeing if they
9940 fold; if not, a new expression is allocated. */
9941
9942static rtx
9943gen_binary (code, mode, op0, op1)
9944 enum rtx_code code;
9945 enum machine_mode mode;
9946 rtx op0, op1;
9947{
9948 rtx result;
1a26b032
RK
9949 rtx tem;
9950
9951 if (GET_RTX_CLASS (code) == 'c'
9952 && (GET_CODE (op0) == CONST_INT
9953 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
9954 tem = op0, op0 = op1, op1 = tem;
230d793d 9955
663522cb 9956 if (GET_RTX_CLASS (code) == '<')
230d793d
RS
9957 {
9958 enum machine_mode op_mode = GET_MODE (op0);
9210df58 9959
663522cb 9960 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
0f41302f 9961 just (REL_OP X Y). */
9210df58
RK
9962 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9963 {
9964 op1 = XEXP (op0, 1);
9965 op0 = XEXP (op0, 0);
9966 op_mode = GET_MODE (op0);
9967 }
9968
230d793d
RS
9969 if (op_mode == VOIDmode)
9970 op_mode = GET_MODE (op1);
9971 result = simplify_relational_operation (code, op_mode, op0, op1);
9972 }
9973 else
9974 result = simplify_binary_operation (code, mode, op0, op1);
9975
9976 if (result)
9977 return result;
9978
9979 /* Put complex operands first and constants second. */
9980 if (GET_RTX_CLASS (code) == 'c'
9981 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9982 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9983 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9984 || (GET_CODE (op0) == SUBREG
9985 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9986 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9987 return gen_rtx_combine (code, mode, op1, op0);
9988
e5e809f4
JL
9989 /* If we are turning off bits already known off in OP0, we need not do
9990 an AND. */
9991 else if (code == AND && GET_CODE (op1) == CONST_INT
9992 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
663522cb 9993 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
e5e809f4
JL
9994 return op0;
9995
230d793d
RS
9996 return gen_rtx_combine (code, mode, op0, op1);
9997}
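/* Usage sketch (added illustration; x is a hypothetical REG rtx):

 rtx sum = gen_binary (PLUS, SImode, GEN_INT (3), x);

 Since PLUS is commutative, the constant is moved last, producing
 (plus:SI x (const_int 3)); had both operands been constants, the
 operation would have been folded by simplify_binary_operation. */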
9998
9999static rtx
0c1c8ea6 10000gen_unary (code, mode, op0_mode, op0)
230d793d 10001 enum rtx_code code;
0c1c8ea6 10002 enum machine_mode mode, op0_mode;
230d793d
RS
10003 rtx op0;
10004{
0c1c8ea6 10005 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
230d793d
RS
10006
10007 if (result)
10008 return result;
10009
10010 return gen_rtx_combine (code, mode, op0);
10011}
10012\f
10013/* Simplify a comparison between *POP0 and *POP1 where CODE is the
10014 comparison code that will be tested.
10015
10016 The result is a possibly different comparison code to use. *POP0 and
10017 *POP1 may be updated.
10018
10019 It is possible that we might detect that a comparison is either always
10020 true or always false. However, we do not perform general constant
5089e22e 10021 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
10022 should have been detected earlier. Hence we ignore all such cases. */
10023
10024static enum rtx_code
10025simplify_comparison (code, pop0, pop1)
10026 enum rtx_code code;
10027 rtx *pop0;
10028 rtx *pop1;
10029{
10030 rtx op0 = *pop0;
10031 rtx op1 = *pop1;
10032 rtx tem, tem1;
10033 int i;
10034 enum machine_mode mode, tmode;
10035
10036 /* Try a few ways of applying the same transformation to both operands. */
10037 while (1)
10038 {
3a19aabc
RK
10039#ifndef WORD_REGISTER_OPERATIONS
10040 /* The test below this one won't handle SIGN_EXTENDs on these machines,
10041 so check specially. */
10042 if (code != GTU && code != GEU && code != LTU && code != LEU
10043 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10044 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10045 && GET_CODE (XEXP (op1, 0)) == ASHIFT
10046 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10047 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10048 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 10049 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
3a19aabc
RK
10050 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10051 && GET_CODE (XEXP (op1, 1)) == CONST_INT
10052 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10053 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
10054 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
10055 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
10056 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
10057 && (INTVAL (XEXP (op0, 1))
10058 == (GET_MODE_BITSIZE (GET_MODE (op0))
10059 - (GET_MODE_BITSIZE
10060 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10061 {
10062 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10063 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10064 }
10065#endif
10066
230d793d
RS
10067 /* If both operands are the same constant shift, see if we can ignore the
10068 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 10069 this shift are known to be zero for both inputs and if the type of
230d793d 10070 comparison is compatible with the shift. */
67232b23
RK
10071 if (GET_CODE (op0) == GET_CODE (op1)
10072 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10073 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 10074 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
67232b23
RK
10075 && (code != GT && code != LT && code != GE && code != LE))
10076 || (GET_CODE (op0) == ASHIFTRT
10077 && (code != GTU && code != LTU
10078 && code != GEU && code != LEU)))
10079 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10080 && INTVAL (XEXP (op0, 1)) >= 0
10081 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10082 && XEXP (op0, 1) == XEXP (op1, 1))
230d793d
RS
10083 {
10084 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 10085 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
10086 int shift_count = INTVAL (XEXP (op0, 1));
10087
10088 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10089 mask &= (mask >> shift_count) << shift_count;
45620ed4 10090 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
10091 mask = (mask & (mask << shift_count)) >> shift_count;
10092
663522cb
KH
10093 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10094 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
230d793d
RS
10095 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10096 else
10097 break;
10098 }
10099
10100 /* If both operands are AND's of a paradoxical SUBREG by constant, the
10101 SUBREGs are of the same mode, and, in both cases, the AND would
10102 be redundant if the comparison was done in the narrower mode,
10103 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
10104 and the operand's possibly nonzero bits are 0xffffff01; in that case
10105 if we only care about QImode, we don't need the AND). This case
10106 occurs if the output mode of an scc insn is not SImode and
7e4dc511
RK
10107 STORE_FLAG_VALUE == 1 (e.g., the 386).
10108
10109 Similarly, check for a case where the AND's are ZERO_EXTEND
10110 operations from some narrower mode even though a SUBREG is not
10111 present. */
230d793d 10112
663522cb
KH
10113 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10114 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10115 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
230d793d 10116 {
7e4dc511
RK
10117 rtx inner_op0 = XEXP (op0, 0);
10118 rtx inner_op1 = XEXP (op1, 0);
10119 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10120 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10121 int changed = 0;
663522cb 10122
7e4dc511
RK
10123 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10124 && (GET_MODE_SIZE (GET_MODE (inner_op0))
10125 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10126 && (GET_MODE (SUBREG_REG (inner_op0))
10127 == GET_MODE (SUBREG_REG (inner_op1)))
729a2bc6 10128 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
7e4dc511 10129 <= HOST_BITS_PER_WIDE_INT)
01c82bbb 10130 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
729a2bc6 10131 GET_MODE (SUBREG_REG (inner_op0)))))
01c82bbb
RK
10132 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10133 GET_MODE (SUBREG_REG (inner_op1))))))
7e4dc511
RK
10134 {
10135 op0 = SUBREG_REG (inner_op0);
10136 op1 = SUBREG_REG (inner_op1);
10137
10138 /* The resulting comparison is always unsigned since we masked
0f41302f 10139 off the original sign bit. */
7e4dc511
RK
10140 code = unsigned_condition (code);
10141
10142 changed = 1;
10143 }
230d793d 10144
7e4dc511
RK
10145 else if (c0 == c1)
10146 for (tmode = GET_CLASS_NARROWEST_MODE
10147 (GET_MODE_CLASS (GET_MODE (op0)));
10148 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
e51712db 10149 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
7e4dc511
RK
10150 {
10151 op0 = gen_lowpart_for_combine (tmode, inner_op0);
10152 op1 = gen_lowpart_for_combine (tmode, inner_op1);
66415c8b 10153 code = unsigned_condition (code);
7e4dc511
RK
10154 changed = 1;
10155 break;
10156 }
10157
10158 if (! changed)
10159 break;
230d793d 10160 }
3a19aabc 10161
ad25ba17
RK
10162 /* If both operands are NOT, we can strip off the outer operation
10163 and adjust the comparison code for swapped operands; similarly for
10164 NEG, except that this must be an equality comparison. */
10165 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10166 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10167 && (code == EQ || code == NE)))
10168 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 10169
230d793d
RS
10170 else
10171 break;
10172 }
663522cb 10173
230d793d 10174 /* If the first operand is a constant, swap the operands and adjust the
3aceff0d
RK
10175 comparison code appropriately, but don't do this if the second operand
10176 is already a constant integer. */
10177 if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
230d793d
RS
10178 {
10179 tem = op0, op0 = op1, op1 = tem;
10180 code = swap_condition (code);
10181 }
10182
10183 /* We now enter a loop during which we will try to simplify the comparison.
10184 For the most part, we are only concerned with comparisons with zero,
10185 but some things may really be comparisons with zero but not start
10186 out looking that way. */
10187
10188 while (GET_CODE (op1) == CONST_INT)
10189 {
10190 enum machine_mode mode = GET_MODE (op0);
770ae6cc 10191 unsigned int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 10192 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
10193 int equality_comparison_p;
10194 int sign_bit_comparison_p;
10195 int unsigned_comparison_p;
5f4f0e22 10196 HOST_WIDE_INT const_op;
230d793d
RS
10197
10198 /* We only want to handle integral modes. This catches VOIDmode,
10199 CCmode, and the floating-point modes. An exception is that we
10200 can handle VOIDmode if OP0 is a COMPARE or a comparison
10201 operation. */
10202
10203 if (GET_MODE_CLASS (mode) != MODE_INT
10204 && ! (mode == VOIDmode
10205 && (GET_CODE (op0) == COMPARE
10206 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10207 break;
10208
10209 /* Get the constant we are comparing against and turn off all bits
10210 not on in our mode. */
3c094e22 10211 const_op = trunc_int_for_mode (INTVAL (op1), mode);
230d793d
RS
10212
10213 /* If we are comparing against a constant power of two and the value
951553af 10214 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
10215 `and'ed with that bit), we can replace this with a comparison
10216 with zero. */
10217 if (const_op
10218 && (code == EQ || code == NE || code == GE || code == GEU
10219 || code == LT || code == LTU)
5f4f0e22 10220 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10221 && exact_log2 (const_op) >= 0
e51712db 10222 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
230d793d
RS
10223 {
10224 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10225 op1 = const0_rtx, const_op = 0;
10226 }
10227
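 /* Added illustration (not part of the original source): if
 nonzero_bits (op0, mode) == 8, then OP0 can only be 0 or 8, so
 (eq op0 8) is rewritten as (ne op0 0) and (ne op0 8) as
 (eq op0 0). */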
d0ab8cd3
RK
10228 /* Similarly, if we are comparing a value known to be either -1 or
10229 0 with -1, change it to the opposite comparison against zero. */
10230
10231 if (const_op == -1
10232 && (code == EQ || code == NE || code == GT || code == LE
10233 || code == GEU || code == LTU)
10234 && num_sign_bit_copies (op0, mode) == mode_width)
10235 {
10236 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10237 op1 = const0_rtx, const_op = 0;
10238 }
10239
230d793d 10240 /* Do some canonicalizations based on the comparison code. We prefer
663522cb 10241 comparisons against zero and then prefer equality comparisons.
4803a34a 10242 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
10243
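 /* Added illustration (not part of the original source): the switch
 below rewrites, for example,

 (lt X 5) as (le X 4),
 (geu X 4) as (gtu X 3),
 (ltu X 0x80000000) as (ge X 0) in SImode,

 preferring comparisons against zero and equality tests. */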
10244 switch (code)
10245 {
10246 case LT:
4803a34a
RK
10247 /* < C is equivalent to <= (C - 1) */
10248 if (const_op > 0)
230d793d 10249 {
4803a34a 10250 const_op -= 1;
5f4f0e22 10251 op1 = GEN_INT (const_op);
230d793d
RS
10252 code = LE;
10253 /* ... fall through to LE case below. */
10254 }
10255 else
10256 break;
10257
10258 case LE:
4803a34a
RK
10259 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
10260 if (const_op < 0)
10261 {
10262 const_op += 1;
5f4f0e22 10263 op1 = GEN_INT (const_op);
4803a34a
RK
10264 code = LT;
10265 }
230d793d
RS
10266
10267 /* If we are doing a <= 0 comparison on a value known to have
10268 a zero sign bit, we can replace this with == 0. */
10269 else if (const_op == 0
5f4f0e22 10270 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10271 && (nonzero_bits (op0, mode)
5f4f0e22 10272 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
10273 code = EQ;
10274 break;
10275
10276 case GE:
0f41302f 10277 /* >= C is equivalent to > (C - 1). */
4803a34a 10278 if (const_op > 0)
230d793d 10279 {
4803a34a 10280 const_op -= 1;
5f4f0e22 10281 op1 = GEN_INT (const_op);
230d793d
RS
10282 code = GT;
10283 /* ... fall through to GT below. */
10284 }
10285 else
10286 break;
10287
10288 case GT:
663522cb 10289 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
4803a34a
RK
10290 if (const_op < 0)
10291 {
10292 const_op += 1;
5f4f0e22 10293 op1 = GEN_INT (const_op);
4803a34a
RK
10294 code = GE;
10295 }
230d793d
RS
10296
10297 /* If we are doing a > 0 comparison on a value known to have
10298 a zero sign bit, we can replace this with != 0. */
10299 else if (const_op == 0
5f4f0e22 10300 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10301 && (nonzero_bits (op0, mode)
5f4f0e22 10302 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
10303 code = NE;
10304 break;
10305
230d793d 10306 case LTU:
4803a34a
RK
10307 /* < C is equivalent to <= (C - 1). */
10308 if (const_op > 0)
10309 {
10310 const_op -= 1;
5f4f0e22 10311 op1 = GEN_INT (const_op);
4803a34a 10312 code = LEU;
0f41302f 10313 /* ... fall through ... */
4803a34a 10314 }
d0ab8cd3
RK
10315
10316 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
f77aada2
JW
10317 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10318 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
10319 {
10320 const_op = 0, op1 = const0_rtx;
10321 code = GE;
10322 break;
10323 }
4803a34a
RK
10324 else
10325 break;
230d793d
RS
10326
10327 case LEU:
10328 /* unsigned <= 0 is equivalent to == 0 */
10329 if (const_op == 0)
10330 code = EQ;
d0ab8cd3 10331
0f41302f 10332 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
f77aada2
JW
10333 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10334 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
10335 {
10336 const_op = 0, op1 = const0_rtx;
10337 code = GE;
10338 }
230d793d
RS
10339 break;
10340
4803a34a
RK
10341 case GEU:
10342 /* >= C is equivalent to > (C - 1). */
10343 if (const_op > 1)
10344 {
10345 const_op -= 1;
5f4f0e22 10346 op1 = GEN_INT (const_op);
4803a34a 10347 code = GTU;
0f41302f 10348 /* ... fall through ... */
4803a34a 10349 }
d0ab8cd3
RK
10350
10351 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
f77aada2
JW
10352 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10353 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
10354 {
10355 const_op = 0, op1 = const0_rtx;
10356 code = LT;
8b2e69e1 10357 break;
d0ab8cd3 10358 }
4803a34a
RK
10359 else
10360 break;
10361
230d793d
RS
10362 case GTU:
10363 /* unsigned > 0 is equivalent to != 0 */
10364 if (const_op == 0)
10365 code = NE;
d0ab8cd3
RK
10366
10367 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
f77aada2
JW
10368 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10369 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
10370 {
10371 const_op = 0, op1 = const0_rtx;
10372 code = LT;
10373 }
230d793d 10374 break;
e9a25f70
JL
10375
10376 default:
10377 break;
230d793d
RS
10378 }
10379
10380 /* Compute some predicates to simplify code below. */
10381
10382 equality_comparison_p = (code == EQ || code == NE);
10383 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10384 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
d5010e66 10385 || code == GEU);
230d793d 10386
6139ff20
RK
10387 /* If this is a sign bit comparison and we can do arithmetic in
10388 MODE, say that we will only be needing the sign bit of OP0. */
10389 if (sign_bit_comparison_p
10390 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10391 op0 = force_to_mode (op0, mode,
10392 ((HOST_WIDE_INT) 1
10393 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 10394 NULL_RTX, 0);
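/* Only the sign bit of OP0 will be examined, so force_to_mode may
   drop work that affects only the low-order bits; e.g. (ior X 7)
   can be reduced to X here.  */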
6139ff20 10395
230d793d
RS
10396 /* Now try cases based on the opcode of OP0. If none of the cases
10397 does a "continue", we exit this loop immediately after the
10398 switch. */
10399
10400 switch (GET_CODE (op0))
10401 {
10402 case ZERO_EXTRACT:
10403 /* If we are extracting a single bit from a variable position in
10404 a constant that has only a single bit set and are comparing it
663522cb 10405 with zero, we can convert this into an equality comparison
d7cd794f 10406 between the position and the location of the single bit. */
230d793d 10407
230d793d
RS
10408 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
10409 && XEXP (op0, 1) == const1_rtx
10410 && equality_comparison_p && const_op == 0
d7cd794f 10411 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
230d793d 10412 {
f76b9db2 10413 if (BITS_BIG_ENDIAN)
0d8e55d8 10414 {
d7cd794f 10415#ifdef HAVE_extzv
a995e389 10416 mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
0d8e55d8
JL
10417 if (mode == VOIDmode)
10418 mode = word_mode;
10419 i = (GET_MODE_BITSIZE (mode) - 1 - i);
d7cd794f 10420#else
663522cb 10421 i = BITS_PER_WORD - 1 - i;
230d793d 10422#endif
0d8e55d8 10423 }
230d793d
RS
10424
10425 op0 = XEXP (op0, 2);
5f4f0e22 10426 op1 = GEN_INT (i);
230d793d
RS
10427 const_op = i;
10428
10429 /* Result is nonzero iff shift count is equal to I. */
10430 code = reverse_condition (code);
10431 continue;
10432 }
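/* For example, (eq (zero_extract (const_int 4) (const_int 1) X) 0)
   tests whether bit X of the value 4 is clear, which is true exactly
   when X is not 2; it becomes (ne X 2), after any BITS_BIG_ENDIAN
   renumbering of the position.  */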
230d793d 10433
0f41302f 10434 /* ... fall through ... */
230d793d
RS
10435
10436 case SIGN_EXTRACT:
10437 tem = expand_compound_operation (op0);
10438 if (tem != op0)
10439 {
10440 op0 = tem;
10441 continue;
10442 }
10443 break;
10444
10445 case NOT:
10446 /* If testing for equality, we can take the NOT of the constant. */
10447 if (equality_comparison_p
10448 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10449 {
10450 op0 = XEXP (op0, 0);
10451 op1 = tem;
10452 continue;
10453 }
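/* For example, (eq (not X) 7) becomes (eq X -8), since ~7 is -8
   in two's complement.  */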
10454
10455 /* If just looking at the sign bit, reverse the sense of the
10456 comparison. */
10457 if (sign_bit_comparison_p)
10458 {
10459 op0 = XEXP (op0, 0);
10460 code = (code == GE ? LT : GE);
10461 continue;
10462 }
10463 break;
10464
10465 case NEG:
10466 /* If testing for equality, we can take the NEG of the constant. */
10467 if (equality_comparison_p
10468 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10469 {
10470 op0 = XEXP (op0, 0);
10471 op1 = tem;
10472 continue;
10473 }
10474
10475 /* The remaining cases only apply to comparisons with zero. */
10476 if (const_op != 0)
10477 break;
10478
10479 /* When X is ABS or is known nonnegative,
10480 (neg X) is < 0 if and only if X != 0. */
10481
10482 if (sign_bit_comparison_p
10483 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 10484 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10485 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10486 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
10487 {
10488 op0 = XEXP (op0, 0);
10489 code = (code == LT ? NE : EQ);
10490 continue;
10491 }
10492
3bed8141 10493 /* If we have NEG of something whose two high-order bits are the
0f41302f 10494 same, we know that "(-a) < 0" is equivalent to "a > 0". */
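/* (Two identical high-order bits mean (neg X) cannot be the most
   negative value, so the negation cannot have overflowed and
   swapping the comparison is safe.)  */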
3bed8141 10495 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
10496 {
10497 op0 = XEXP (op0, 0);
10498 code = swap_condition (code);
10499 continue;
10500 }
10501 break;
10502
10503 case ROTATE:
10504 /* If we are testing equality and our count is a constant, we
10505 can perform the inverse operation on our RHS. */
10506 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10507 && (tem = simplify_binary_operation (ROTATERT, mode,
10508 op1, XEXP (op0, 1))) != 0)
10509 {
10510 op0 = XEXP (op0, 0);
10511 op1 = tem;
10512 continue;
10513 }
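/* For example, in SImode, (eq (rotate X 3) 8) becomes (eq X 1),
   since rotating 8 right by 3 gives 1.  */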
10514
10515 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10516 a particular bit. Convert it to an AND of a constant of that
10517 bit. This will be converted into a ZERO_EXTRACT. */
10518 if (const_op == 0 && sign_bit_comparison_p
10519 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10520 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10521 {
5f4f0e22
CH
10522 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10523 ((HOST_WIDE_INT) 1
10524 << (mode_width - 1
10525 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10526 code = (code == LT ? NE : EQ);
10527 continue;
10528 }
10529
663522cb 10530 /* Fall through. */
230d793d
RS
10531
10532 case ABS:
10533 /* ABS is ignorable inside an equality comparison with zero. */
10534 if (const_op == 0 && equality_comparison_p)
10535 {
10536 op0 = XEXP (op0, 0);
10537 continue;
10538 }
10539 break;
230d793d
RS
10540
10541 case SIGN_EXTEND:
10542 /* Can simplify (compare (zero/sign_extend FOO) CONST)
663522cb 10543 to (compare FOO CONST) if CONST fits in FOO's mode and we
230d793d
RS
10544 are either testing inequality or have an unsigned comparison
10545 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
10546 if (! unsigned_comparison_p
10547 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
10548 <= HOST_BITS_PER_WIDE_INT)
10549 && ((unsigned HOST_WIDE_INT) const_op
e51712db 10550 < (((unsigned HOST_WIDE_INT) 1
5f4f0e22 10551 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
10552 {
10553 op0 = XEXP (op0, 0);
10554 continue;
10555 }
10556 break;
10557
10558 case SUBREG:
a687e897 10559 /* Check for the case where we are comparing A - C1 with C2,
abc95ed3 10560 both constants are smaller than 1/2 the maximum positive
a687e897
RK
10561 value in MODE, and the comparison is equality or unsigned.
10562 In that case, if A is either zero-extended to MODE or has
10563 sufficient sign bits so that the high-order bit in MODE
10564 is a copy of the sign in the inner mode, we can prove that it is
10565 safe to do the operation in the wider mode. This simplifies
10566 many range checks. */
10567
10568 if (mode_width <= HOST_BITS_PER_WIDE_INT
10569 && subreg_lowpart_p (op0)
10570 && GET_CODE (SUBREG_REG (op0)) == PLUS
10571 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10572 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
663522cb
KH
10573 && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10574 < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
adb7a1cb 10575 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
10576 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10577 GET_MODE (SUBREG_REG (op0)))
663522cb 10578 & ~GET_MODE_MASK (mode))
a687e897
RK
10579 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10580 GET_MODE (SUBREG_REG (op0)))
10581 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10582 - GET_MODE_BITSIZE (mode)))))
10583 {
10584 op0 = SUBREG_REG (op0);
10585 continue;
10586 }
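/* For instance, a range check such as (unsigned char) (c - 'a') <= 25,
   i.e. (leu (subreg:QI (plus:SI C -97) 0) 25), can be done directly
   on the SImode PLUS when C is known to be zero-extended.  */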
10587
fe0cf571
RK
10588 /* If the inner mode is narrower and we are extracting the low part,
10589 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10590 if (subreg_lowpart_p (op0)
89f1c7f2
RS
10591 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10592 /* Fall through */ ;
10593 else
230d793d
RS
10594 break;
10595
0f41302f 10596 /* ... fall through ... */
230d793d
RS
10597
10598 case ZERO_EXTEND:
10599 if ((unsigned_comparison_p || equality_comparison_p)
10600 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
10601 <= HOST_BITS_PER_WIDE_INT)
10602 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
10603 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10604 {
10605 op0 = XEXP (op0, 0);
10606 continue;
10607 }
10608 break;
10609
10610 case PLUS:
20fdd649 10611 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 10612 this for equality comparisons due to pathological cases involving
230d793d 10613 overflows. */
20fdd649
RK
10614 if (equality_comparison_p
10615 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10616 op1, XEXP (op0, 1))))
230d793d
RS
10617 {
10618 op0 = XEXP (op0, 0);
10619 op1 = tem;
10620 continue;
10621 }
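/* For example, (eq (plus X 5) 7) becomes (eq X 2). An ordered
   comparison cannot be transformed this way because the PLUS
   may wrap around.  */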
10622
10623 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10624 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10625 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10626 {
10627 op0 = XEXP (XEXP (op0, 0), 0);
10628 code = (code == LT ? EQ : NE);
10629 continue;
10630 }
10631 break;
10632
10633 case MINUS:
65945ec1
HPN
10634 /* We used to optimize signed comparisons against zero, but that
10635 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10636 arrive here as equality comparisons, or (GEU, LTU) are
10637 optimized away. No need to special-case them. */
0bd4b461 10638
20fdd649
RK
10639 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10640 (eq B (minus A C)), whichever simplifies. We can only do
10641 this for equality comparisons due to pathological cases involving
10642 overflows. */
10643 if (equality_comparison_p
10644 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10645 XEXP (op0, 1), op1)))
10646 {
10647 op0 = XEXP (op0, 0);
10648 op1 = tem;
10649 continue;
10650 }
10651
10652 if (equality_comparison_p
10653 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10654 XEXP (op0, 0), op1)))
10655 {
10656 op0 = XEXP (op0, 1);
10657 op1 = tem;
10658 continue;
10659 }
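/* For example, (eq (minus X 3) 4) becomes (eq X 7), and
   (eq (minus 10 X) 4) becomes (eq X 6).  */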
10660
230d793d
RS
10661 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10662 of bits in X minus 1, is one iff X > 0. */
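/* (When C is the width minus 1, (ashiftrt X C) is 0 for X >= 0 and
   -1 for X < 0; subtracting X then gives -X or -1-X, whose sign
   bit is set exactly when X > 0.)  */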
10663 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10664 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10665 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10666 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10667 {
10668 op0 = XEXP (op0, 1);
10669 code = (code == GE ? LE : GT);
10670 continue;
10671 }
10672 break;
10673
10674 case XOR:
10675 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10676 if C is zero or B is a constant. */
10677 if (equality_comparison_p
10678 && 0 != (tem = simplify_binary_operation (XOR, mode,
10679 XEXP (op0, 1), op1)))
10680 {
10681 op0 = XEXP (op0, 0);
10682 op1 = tem;
10683 continue;
10684 }
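/* For example, (eq (xor X 5) 3) becomes (eq X 6), since
   X ^ 5 == 3 exactly when X == (5 ^ 3).  */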
10685 break;
10686
10687 case EQ: case NE:
69bc0a1f
JH
10688 case UNEQ: case LTGT:
10689 case LT: case LTU: case UNLT: case LE: case LEU: case UNLE:
10690 case GT: case GTU: case UNGT: case GE: case GEU: case UNGE:
10691 case UNORDERED: case ORDERED:
230d793d
RS
10692 /* We can't do anything if OP0 is a condition code value, rather
10693 than an actual data value. */
10694 if (const_op != 0
10695#ifdef HAVE_cc0
10696 || XEXP (op0, 0) == cc0_rtx
10697#endif
10698 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10699 break;
10700
10701 /* Get the two operands being compared. */
10702 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10703 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10704 else
10705 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10706
10707 /* Check for the cases where we simply want the result of the
10708 earlier test or the opposite of that result. */
9a915772 10709 if (code == NE || code == EQ
5f4f0e22 10710 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 10711 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 10712 && (STORE_FLAG_VALUE
5f4f0e22
CH
10713 & (((HOST_WIDE_INT) 1
10714 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
aa6683f7 10715 && (code == LT || code == GE)))
230d793d 10716 {
aa6683f7
GK
10717 enum rtx_code new_code;
10718 if (code == LT || code == NE)
10719 new_code = GET_CODE (op0);
10720 else
10721 new_code = combine_reversed_comparison_code (op0);
10722
10723 if (new_code != UNKNOWN)
9a915772 10724 {
aa6683f7
GK
10725 code = new_code;
10726 op0 = tem;
10727 op1 = tem1;
9a915772
JH
10728 continue;
10729 }
230d793d
RS
10730 }
10731 break;
10732
10733 case IOR:
10734 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10735 iff X <= 0. */
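/* (X - 1 is negative when X <= 0, and X itself is negative when
   X < 0, so the sign bit of their IOR is set exactly when
   X <= 0.)  */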
10736 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10737 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10738 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10739 {
10740 op0 = XEXP (op0, 1);
10741 code = (code == GE ? GT : LE);
10742 continue;
10743 }
10744 break;
10745
10746 case AND:
10747 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10748 will be converted to a ZERO_EXTRACT later. */
10749 if (const_op == 0 && equality_comparison_p
45620ed4 10750 && GET_CODE (XEXP (op0, 0)) == ASHIFT
230d793d
RS
10751 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10752 {
10753 op0 = simplify_and_const_int
10754 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10755 XEXP (op0, 1),
10756 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 10757 (HOST_WIDE_INT) 1);
230d793d
RS
10758 continue;
10759 }
10760
10761 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10762 zero and X is a comparison and C1 and C2 describe only bits set
10763 in STORE_FLAG_VALUE, we can compare with X. */
10764 if (const_op == 0 && equality_comparison_p
5f4f0e22 10765 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
10766 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10767 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10768 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10769 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 10770 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
10771 {
10772 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10773 << INTVAL (XEXP (XEXP (op0, 0), 1)));
663522cb 10774 if ((~STORE_FLAG_VALUE & mask) == 0
230d793d
RS
10775 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10776 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10777 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10778 {
10779 op0 = XEXP (XEXP (op0, 0), 0);
10780 continue;
10781 }
10782 }
10783
10784 /* If we are doing an equality comparison of an AND of a bit equal
10785 to the sign bit, replace this with a LT or GE comparison of
10786 the underlying value. */
10787 if (equality_comparison_p
10788 && const_op == 0
10789 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10790 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10791 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
e51712db 10792 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
10793 {
10794 op0 = XEXP (op0, 0);
10795 code = (code == EQ ? GE : LT);
10796 continue;
10797 }
10798
10799 /* If this AND operation is really a ZERO_EXTEND from a narrower
10800 mode, the constant fits within that mode, and this is either an
10801 equality or unsigned comparison, try to do this comparison in
10802 the narrower mode. */
10803 if ((equality_comparison_p || unsigned_comparison_p)
10804 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10805 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10806 & GET_MODE_MASK (mode))
10807 + 1)) >= 0
10808 && const_op >> i == 0
10809 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10810 {
10811 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10812 continue;
10813 }
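/* For example, (eq (and:SI X 255) 12) becomes a QImode comparison
   of the low byte of X against 12, since the AND acts as a
   zero-extension from QImode.  */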
e5e809f4
JL
10814
10815 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10816 in both M1 and M2 and the SUBREG is either paradoxical or
10817 represents the low part, permute the SUBREG and the AND and
10818 try again. */
10819 if (GET_CODE (XEXP (op0, 0)) == SUBREG
c5c76735 10820 && (0
9ec36da5 10821#ifdef WORD_REGISTER_OPERATIONS
c5c76735
JL
10822 || ((mode_width
10823 > (GET_MODE_BITSIZE
10824 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10825 && mode_width <= BITS_PER_WORD)
9ec36da5 10826#endif
c5c76735
JL
10827 || ((mode_width
10828 <= (GET_MODE_BITSIZE
10829 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10830 && subreg_lowpart_p (XEXP (op0, 0))))
adc05e6c
JL
10831#ifndef WORD_REGISTER_OPERATIONS
10832 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10833 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10834 As originally written the upper bits have a defined value
10835 due to the AND operation. However, if we commute the AND
10836 inside the SUBREG then they no longer have defined values
10837 and the meaning of the code has been changed. */
10838 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10839 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10840#endif
e5e809f4
JL
10841 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10842 && mode_width <= HOST_BITS_PER_WIDE_INT
10843 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10844 <= HOST_BITS_PER_WIDE_INT)
663522cb
KH
10845 && (INTVAL (XEXP (op0, 1)) & ~mask) == 0
10846 && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
9ec36da5 10847 & INTVAL (XEXP (op0, 1)))
e51712db
KG
10848 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10849 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
9ec36da5 10850 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
663522cb 10851
e5e809f4
JL
10852 {
10853 op0
10854 = gen_lowpart_for_combine
10855 (mode,
10856 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10857 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10858 continue;
10859 }
10860
9f8e169e
RH
10861 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10862 (eq (and (lshiftrt X) 1) 0). */
10863 if (const_op == 0 && equality_comparison_p
10864 && XEXP (op0, 1) == const1_rtx
10865 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10866 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT)
10867 {
10868 op0 = simplify_and_const_int
10869 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10870 XEXP (XEXP (XEXP (op0, 0), 0), 0),
10871 XEXP (XEXP (op0, 0), 1)),
10872 (HOST_WIDE_INT) 1);
10873 code = (code == NE ? EQ : NE);
10874 continue;
10875 }
230d793d
RS
10876 break;
10877
10878 case ASHIFT:
45620ed4 10879 /* If we have (compare (ashift FOO N) (const_int C)) and
230d793d 10880 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 10881 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
10882 shifted right N bits so long as the low-order N bits of C are
10883 zero. */
10884 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10885 && INTVAL (XEXP (op0, 1)) >= 0
10886 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
10887 < HOST_BITS_PER_WIDE_INT)
10888 && ((const_op
34785d05 10889 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 10890 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10891 && (nonzero_bits (XEXP (op0, 0), mode)
663522cb
KH
10892 & ~(mask >> (INTVAL (XEXP (op0, 1))
10893 + ! equality_comparison_p))) == 0)
230d793d 10894 {
7ce787fe
NC
10895 /* We must perform a logical shift, not an arithmetic one,
10896 as we want the top N bits of C to be zero. */
aaaec114 10897 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
663522cb 10898
7ce787fe 10899 temp >>= INTVAL (XEXP (op0, 1));
aaaec114 10900 op1 = GEN_INT (trunc_int_for_mode (temp, mode));
230d793d
RS
10901 op0 = XEXP (op0, 0);
10902 continue;
10903 }
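/* For example, (eq (ashift X 2) 20) becomes (eq X 5) when the high
   two bits of X are known zero, since the shift then loses no
   information and the low two bits of 20 are zero.  */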
10904
dfbe1b2f 10905 /* If we are doing a sign bit comparison, it means we are testing
230d793d 10906 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 10907 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10908 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10909 {
5f4f0e22
CH
10910 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10911 ((HOST_WIDE_INT) 1
10912 << (mode_width - 1
10913 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10914 code = (code == LT ? NE : EQ);
10915 continue;
10916 }
dfbe1b2f
RK
10917
10918 /* If this is an equality comparison with zero and we are shifting
10919 the low bit to the sign bit, we can convert this to an AND of the
10920 low-order bit. */
10921 if (const_op == 0 && equality_comparison_p
10922 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10923 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10924 {
5f4f0e22
CH
10925 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10926 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
10927 continue;
10928 }
230d793d
RS
10929 break;
10930
10931 case ASHIFTRT:
d0ab8cd3
RK
10932 /* If this is an equality comparison with zero, we can do this
10933 as a logical shift, which might be much simpler. */
10934 if (equality_comparison_p && const_op == 0
10935 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10936 {
10937 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10938 XEXP (op0, 0),
10939 INTVAL (XEXP (op0, 1)));
10940 continue;
10941 }
10942
230d793d
RS
10943 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10944 do the comparison in a narrower mode. */
10945 if (! unsigned_comparison_p
10946 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10947 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10948 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10949 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 10950 MODE_INT, 1)) != BLKmode
5f4f0e22 10951 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
663522cb 10952 || ((unsigned HOST_WIDE_INT) -const_op
5f4f0e22 10953 <= GET_MODE_MASK (tmode))))
230d793d
RS
10954 {
10955 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10956 continue;
10957 }
10958
14a774a9
RK
10959 /* Likewise if OP0 is a PLUS of a sign extension with a
10960 constant, which is usually represented with the PLUS
10961 between the shifts. */
10962 if (! unsigned_comparison_p
10963 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10964 && GET_CODE (XEXP (op0, 0)) == PLUS
10965 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10966 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10967 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10968 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10969 MODE_INT, 1)) != BLKmode
10970 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
663522cb 10971 || ((unsigned HOST_WIDE_INT) -const_op
14a774a9
RK
10972 <= GET_MODE_MASK (tmode))))
10973 {
10974 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10975 rtx add_const = XEXP (XEXP (op0, 0), 1);
10976 rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
10977 XEXP (op0, 1));
10978
10979 op0 = gen_binary (PLUS, tmode,
10980 gen_lowpart_for_combine (tmode, inner),
10981 new_const);
10982 continue;
10983 }
10984
0f41302f 10985 /* ... fall through ... */
230d793d
RS
10986 case LSHIFTRT:
10987 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 10988 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
10989 by comparing FOO with C shifted left N bits so long as no
10990 overflow occurs. */
10991 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10992 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
10993 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10994 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10995 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10996 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
10997 && (const_op == 0
10998 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10999 < mode_width)))
11000 {
11001 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 11002 op1 = GEN_INT (const_op);
230d793d
RS
11003 op0 = XEXP (op0, 0);
11004 continue;
11005 }
11006
11007 /* If we are using this shift to extract just the sign bit, we
11008 can replace this with an LT or GE comparison. */
11009 if (const_op == 0
11010 && (equality_comparison_p || sign_bit_comparison_p)
11011 && GET_CODE (XEXP (op0, 1)) == CONST_INT
11012 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
11013 {
11014 op0 = XEXP (op0, 0);
11015 code = (code == NE || code == GT ? LT : GE);
11016 continue;
11017 }
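/* For example, (ne (lshiftrt X 31) 0) in SImode isolates the sign
   bit of X and so becomes (lt X 0).  */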
11018 break;
663522cb 11019
e9a25f70
JL
11020 default:
11021 break;
230d793d
RS
11022 }
11023
11024 break;
11025 }
11026
11027 /* Now make any compound operations involved in this comparison. Then,
76d31c63 11028 check for an outermost SUBREG on OP0 that is not doing anything or is
230d793d
RS
11029 paradoxical. The latter case can only occur when it is known that the
11030 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
11031 We can never remove a SUBREG for a non-equality comparison because the
11032 sign bit is in a different place in the underlying object. */
11033
11034 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11035 op1 = make_compound_operation (op1, SET);
11036
11037 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11038 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11039 && (code == NE || code == EQ)
11040 && ((GET_MODE_SIZE (GET_MODE (op0))
11041 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
11042 {
11043 op0 = SUBREG_REG (op0);
11044 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
11045 }
11046
11047 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11048 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11049 && (code == NE || code == EQ)
ac49a949
RS
11050 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11051 <= HOST_BITS_PER_WIDE_INT)
951553af 11052 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
663522cb 11053 & ~GET_MODE_MASK (GET_MODE (op0))) == 0
230d793d
RS
11054 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
11055 op1),
951553af 11056 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
663522cb 11057 & ~GET_MODE_MASK (GET_MODE (op0))) == 0))
230d793d
RS
11058 op0 = SUBREG_REG (op0), op1 = tem;
11059
11060 /* We now do the opposite procedure: Some machines don't have compare
11061 insns in all modes. If OP0's mode is an integer mode smaller than a
11062 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
11063 mode for which we can do the compare. There are a number of cases in
11064 which we can use the wider mode. */
230d793d
RS
11065
11066 mode = GET_MODE (op0);
11067 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11068 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11069 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
11070 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
11071 (tmode != VOIDmode
11072 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 11073 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 11074 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 11075 {
951553af 11076 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
11077 narrower mode and this is an equality or unsigned comparison,
11078 we can use the wider mode. Similarly for sign-extended
7e4dc511 11079 values, in which case it is true for all comparisons. */
a687e897
RK
11080 if (((code == EQ || code == NE
11081 || code == GEU || code == GTU || code == LEU || code == LTU)
663522cb
KH
11082 && (nonzero_bits (op0, tmode) & ~GET_MODE_MASK (mode)) == 0
11083 && (nonzero_bits (op1, tmode) & ~GET_MODE_MASK (mode)) == 0)
7e4dc511
RK
11084 || ((num_sign_bit_copies (op0, tmode)
11085 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 11086 && (num_sign_bit_copies (op1, tmode)
58744483 11087 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897 11088 {
14a774a9
RK
11089 /* If OP0 is an AND and we don't have an AND in MODE either,
11090 make a new AND in the proper mode. */
11091 if (GET_CODE (op0) == AND
11092 && (add_optab->handlers[(int) mode].insn_code
11093 == CODE_FOR_nothing))
11094 op0 = gen_binary (AND, tmode,
11095 gen_lowpart_for_combine (tmode,
11096 XEXP (op0, 0)),
11097 gen_lowpart_for_combine (tmode,
11098 XEXP (op0, 1)));
11099
a687e897
RK
11100 op0 = gen_lowpart_for_combine (tmode, op0);
11101 op1 = gen_lowpart_for_combine (tmode, op1);
11102 break;
11103 }
230d793d 11104
a687e897
RK
11105 /* If this is a test for negative, we can make an explicit
11106 test of the sign bit. */
11107
11108 if (op1 == const0_rtx && (code == LT || code == GE)
11109 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 11110 {
a687e897
RK
11111 op0 = gen_binary (AND, tmode,
11112 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
11113 GEN_INT ((HOST_WIDE_INT) 1
11114 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 11115 code = (code == LT) ? NE : EQ;
a687e897 11116 break;
230d793d 11117 }
230d793d
RS
11118 }
11119
b7a775b2
RK
11120#ifdef CANONICALIZE_COMPARISON
11121 /* If this machine only supports a subset of valid comparisons, see if we
11122 can convert an unsupported one into a supported one. */
11123 CANONICALIZE_COMPARISON (code, op0, op1);
11124#endif
11125
230d793d
RS
11126 *pop0 = op0;
11127 *pop1 = op1;
11128
11129 return code;
11130}
11131\f
9a915772
JH
11132/* Like jump.c's reversed_comparison_code, but use combine infrastructure for
11133 searching backward. */
c3ffea50 11134static enum rtx_code
9a915772
JH
11135combine_reversed_comparison_code (exp)
11136 rtx exp;
230d793d 11137{
9a915772
JH
11138 enum rtx_code code1 = reversed_comparison_code (exp, NULL);
11139 rtx x;
11140
11141 if (code1 != UNKNOWN
11142 || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
11143 return code1;
11144 /* Otherwise try to find where the condition codes were last set and
11145 use that. */
c3ffea50
AJ
11146 x = get_last_value (XEXP (exp, 0));
11147 if (!x || GET_CODE (x) != COMPARE)
9a915772
JH
11148 return UNKNOWN;
11149 return reversed_comparison_code_parts (GET_CODE (exp),
11150 XEXP (x, 0), XEXP (x, 1), NULL);
11151}
11152/* Return comparison with reversed code of EXP and operands OP0 and OP1.
11153 Return NULL_RTX in case we fail to do the reversal. */
11154static rtx
11155reversed_comparison (exp, mode, op0, op1)
11156 rtx exp, op0, op1;
11157 enum machine_mode mode;
11158{
11159 enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
11160 if (reversed_code == UNKNOWN)
11161 return NULL_RTX;
11162 else
11163 return gen_binary (reversed_code, mode, op0, op1);
230d793d
RS
11164}
11165\f
11166/* Utility function for following routine. Called when X is part of a value
11167 being stored into reg_last_set_value. Sets reg_last_set_table_tick
11168 for each register mentioned. Similar to mention_regs in cse.c */
11169
11170static void
11171update_table_tick (x)
11172 rtx x;
11173{
11174 register enum rtx_code code = GET_CODE (x);
6f7d635c 11175 register const char *fmt = GET_RTX_FORMAT (code);
230d793d
RS
11176 register int i;
11177
11178 if (code == REG)
11179 {
770ae6cc
RK
11180 unsigned int regno = REGNO (x);
11181 unsigned int endregno
11182 = regno + (regno < FIRST_PSEUDO_REGISTER
11183 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11184 unsigned int r;
230d793d 11185
770ae6cc
RK
11186 for (r = regno; r < endregno; r++)
11187 reg_last_set_table_tick[r] = label_tick;
230d793d
RS
11188
11189 return;
11190 }
663522cb 11191
230d793d
RS
11192 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11193 /* Note that we can't have an "E" in values stored; see
11194 get_last_value_validate. */
11195 if (fmt[i] == 'e')
11196 update_table_tick (XEXP (x, i));
11197}
11198
11199/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
11200 are saying that the register is clobbered and we no longer know its
7988fd36
RK
11201 value. If INSN is zero, don't update reg_last_set; this is only permitted
11202 with VALUE also zero and is used to invalidate the register. */
230d793d
RS
11203
11204static void
11205record_value_for_reg (reg, insn, value)
11206 rtx reg;
11207 rtx insn;
11208 rtx value;
11209{
770ae6cc
RK
11210 unsigned int regno = REGNO (reg);
11211 unsigned int endregno
11212 = regno + (regno < FIRST_PSEUDO_REGISTER
11213 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11214 unsigned int i;
230d793d
RS
11215
11216 /* If VALUE contains REG and we have a previous value for REG, substitute
11217 the previous value. */
11218 if (value && insn && reg_overlap_mentioned_p (reg, value))
11219 {
11220 rtx tem;
11221
11222 /* Set things up so get_last_value is allowed to see anything set up to
11223 our insn. */
11224 subst_low_cuid = INSN_CUID (insn);
663522cb 11225 tem = get_last_value (reg);
230d793d 11226
14a774a9
RK
11227 /* If TEM is simply a binary operation with two CLOBBERs as operands,
11228 it isn't going to be useful and will take a lot of time to process,
11229 so just use the CLOBBER. */
11230
230d793d 11231 if (tem)
14a774a9
RK
11232 {
11233 if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11234 || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11235 && GET_CODE (XEXP (tem, 0)) == CLOBBER
11236 && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11237 tem = XEXP (tem, 0);
11238
11239 value = replace_rtx (copy_rtx (value), reg, tem);
11240 }
230d793d
RS
11241 }
11242
11243 /* For each register modified, show we don't know its value, that
ef026f91
RS
11244 we don't know about its bitwise content, that its value has been
11245 updated, and that we don't know the location of the death of the
11246 register. */
770ae6cc 11247 for (i = regno; i < endregno; i++)
230d793d
RS
11248 {
11249 if (insn)
11250 reg_last_set[i] = insn;
770ae6cc 11251
230d793d 11252 reg_last_set_value[i] = 0;
ef026f91
RS
11253 reg_last_set_mode[i] = 0;
11254 reg_last_set_nonzero_bits[i] = 0;
11255 reg_last_set_sign_bit_copies[i] = 0;
230d793d
RS
11256 reg_last_death[i] = 0;
11257 }
11258
11259 /* Mark registers that are being referenced in this value. */
11260 if (value)
11261 update_table_tick (value);
11262
11263 /* Now update the status of each register being set.
11264 If someone is using this register in this block, set this register
11265 to invalid since we will get confused between the two lives in this
11266 basic block. This makes using this register always invalid. In cse, we
11267 scan the table to invalidate all entries using this register, but this
11268 is too much work for us. */
11269
11270 for (i = regno; i < endregno; i++)
11271 {
11272 reg_last_set_label[i] = label_tick;
11273 if (value && reg_last_set_table_tick[i] == label_tick)
11274 reg_last_set_invalid[i] = 1;
11275 else
11276 reg_last_set_invalid[i] = 0;
11277 }
11278
11279 /* The value being assigned might refer to X (like in "x++;"). In that
11280 case, we must replace it with (clobber (const_int 0)) to prevent
11281 infinite loops. */
9a893315 11282 if (value && ! get_last_value_validate (&value, insn,
230d793d
RS
11283 reg_last_set_label[regno], 0))
11284 {
11285 value = copy_rtx (value);
9a893315
JW
11286 if (! get_last_value_validate (&value, insn,
11287 reg_last_set_label[regno], 1))
230d793d
RS
11288 value = 0;
11289 }
11290
55310dad
RK
11291 /* For the main register being modified, update the value, the mode, the
11292 nonzero bits, and the number of sign bit copies. */
11293
230d793d
RS
11294 reg_last_set_value[regno] = value;
11295
55310dad
RK
11296 if (value)
11297 {
2afabb48 11298 subst_low_cuid = INSN_CUID (insn);
55310dad
RK
11299 reg_last_set_mode[regno] = GET_MODE (reg);
11300 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
11301 reg_last_set_sign_bit_copies[regno]
11302 = num_sign_bit_copies (value, GET_MODE (reg));
11303 }
230d793d
RS
11304}
11305
230d793d 11306/* Called via note_stores from record_dead_and_set_regs to handle one
84832317
MM
11307 SET or CLOBBER in an insn. DATA is the instruction in which the
11308 set is occurring. */
230d793d
RS
11309
11310static void
84832317 11311record_dead_and_set_regs_1 (dest, setter, data)
230d793d 11312 rtx dest, setter;
84832317 11313 void *data;
230d793d 11314{
84832317
MM
11315 rtx record_dead_insn = (rtx) data;
11316
ca89d290
RK
11317 if (GET_CODE (dest) == SUBREG)
11318 dest = SUBREG_REG (dest);
11319
230d793d
RS
11320 if (GET_CODE (dest) == REG)
11321 {
11322 /* If we are setting the whole register, we know its value. Otherwise
11323 show that we don't know the value. We can handle SUBREG in
11324 some cases. */
11325 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11326 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11327 else if (GET_CODE (setter) == SET
11328 && GET_CODE (SET_DEST (setter)) == SUBREG
11329 && SUBREG_REG (SET_DEST (setter)) == dest
90bf8081 11330 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
230d793d 11331 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
11332 record_value_for_reg (dest, record_dead_insn,
11333 gen_lowpart_for_combine (GET_MODE (dest),
11334 SET_SRC (setter)));
230d793d 11335 else
5f4f0e22 11336 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
11337 }
11338 else if (GET_CODE (dest) == MEM
11339 /* Ignore pushes, they clobber nothing. */
11340 && ! push_operand (dest, GET_MODE (dest)))
11341 mem_last_set = INSN_CUID (record_dead_insn);
11342}
11343
11344/* Update the records of when each REG was most recently set or killed
11345 for the things done by INSN. This is the last thing done in processing
11346 INSN in the combiner loop.
11347
ef026f91
RS
11348 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11349 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11350 and also the similar information mem_last_set (which insn most recently
11351 modified memory) and last_call_cuid (which insn was the most recent
11352 subroutine call). */
230d793d
RS
11353
11354static void
11355record_dead_and_set_regs (insn)
11356 rtx insn;
11357{
11358 register rtx link;
770ae6cc 11359 unsigned int i;
55310dad 11360
230d793d
RS
11361 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11362 {
dbc131f3
RK
11363 if (REG_NOTE_KIND (link) == REG_DEAD
11364 && GET_CODE (XEXP (link, 0)) == REG)
11365 {
770ae6cc
RK
11366 unsigned int regno = REGNO (XEXP (link, 0));
11367 unsigned int endregno
dbc131f3
RK
11368 = regno + (regno < FIRST_PSEUDO_REGISTER
11369 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11370 : 1);
dbc131f3
RK
11371
11372 for (i = regno; i < endregno; i++)
11373 reg_last_death[i] = insn;
11374 }
230d793d 11375 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 11376 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
11377 }
11378
11379 if (GET_CODE (insn) == CALL_INSN)
55310dad
RK
11380 {
11381 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11382 if (call_used_regs[i])
11383 {
11384 reg_last_set_value[i] = 0;
ef026f91
RS
11385 reg_last_set_mode[i] = 0;
11386 reg_last_set_nonzero_bits[i] = 0;
11387 reg_last_set_sign_bit_copies[i] = 0;
55310dad
RK
11388 reg_last_death[i] = 0;
11389 }
11390
11391 last_call_cuid = mem_last_set = INSN_CUID (insn);
11392 }
230d793d 11393
84832317 11394 note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
230d793d 11395}
732f2ac9 11396
732f2ac9
JJ
11397/* If a SUBREG has the promoted bit set, it is in fact a property of the
11398 register present in the SUBREG, so for each such SUBREG go back and
11399 adjust nonzero and sign bit information of the registers that are
11400 known to have some zero/sign bits set.
11401
11402 This is needed because when combine blows the SUBREGs away, the
11403 information on zero/sign bits is lost and further combines can be
11404 missed because of that. */
11405
11406static void
11407record_promoted_value (insn, subreg)
663522cb
KH
11408 rtx insn;
11409 rtx subreg;
732f2ac9 11410{
4a71b24f 11411 rtx links, set;
770ae6cc 11412 unsigned int regno = REGNO (SUBREG_REG (subreg));
732f2ac9
JJ
11413 enum machine_mode mode = GET_MODE (subreg);
11414
25af74a0 11415 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
732f2ac9
JJ
11416 return;
11417
663522cb 11418 for (links = LOG_LINKS (insn); links;)
732f2ac9
JJ
11419 {
11420 insn = XEXP (links, 0);
11421 set = single_set (insn);
11422
11423 if (! set || GET_CODE (SET_DEST (set)) != REG
11424 || REGNO (SET_DEST (set)) != regno
11425 || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11426 {
11427 links = XEXP (links, 1);
11428 continue;
11429 }
11430
663522cb
KH
11431 if (reg_last_set[regno] == insn)
11432 {
732f2ac9 11433 if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
663522cb
KH
11434 reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11435 }
732f2ac9
JJ
11436
11437 if (GET_CODE (SET_SRC (set)) == REG)
11438 {
11439 regno = REGNO (SET_SRC (set));
11440 links = LOG_LINKS (insn);
11441 }
11442 else
11443 break;
11444 }
11445}
11446
11447/* Scan X for promoted SUBREGs. For each one found,
11448 note what it implies to the registers used in it. */
11449
11450static void
11451check_promoted_subreg (insn, x)
663522cb
KH
11452 rtx insn;
11453 rtx x;
732f2ac9
JJ
11454{
11455 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11456 && GET_CODE (SUBREG_REG (x)) == REG)
11457 record_promoted_value (insn, x);
11458 else
11459 {
11460 const char *format = GET_RTX_FORMAT (GET_CODE (x));
11461 int i, j;
11462
11463 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
663522cb 11464 switch (format[i])
732f2ac9
JJ
11465 {
11466 case 'e':
11467 check_promoted_subreg (insn, XEXP (x, i));
11468 break;
11469 case 'V':
11470 case 'E':
11471 if (XVEC (x, i) != 0)
11472 for (j = 0; j < XVECLEN (x, i); j++)
11473 check_promoted_subreg (insn, XVECEXP (x, i, j));
11474 break;
11475 }
11476 }
11477}
230d793d
RS
11478\f
11479/* Utility routine for the following function. Verify that all the registers
11480 mentioned in *LOC are valid when *LOC was part of a value set when
11481 label_tick == TICK. Return 0 if some are not.
11482
11483 If REPLACE is non-zero, replace the invalid reference with
11484 (clobber (const_int 0)) and return 1. This replacement is useful because
11485 we often can get useful information about the form of a value (e.g., if
11486 it was produced by a shift that always produces -1 or 0) even though
11487 we don't know exactly what registers it was produced from. */
11488
11489static int
9a893315 11490get_last_value_validate (loc, insn, tick, replace)
230d793d 11491 rtx *loc;
9a893315 11492 rtx insn;
230d793d
RS
11493 int tick;
11494 int replace;
11495{
11496 rtx x = *loc;
6f7d635c 11497 const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
230d793d
RS
11498 int len = GET_RTX_LENGTH (GET_CODE (x));
11499 int i;
11500
11501 if (GET_CODE (x) == REG)
11502 {
770ae6cc
RK
11503 unsigned int regno = REGNO (x);
11504 unsigned int endregno
11505 = regno + (regno < FIRST_PSEUDO_REGISTER
11506 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11507 unsigned int j;
230d793d
RS
11508
11509 for (j = regno; j < endregno; j++)
11510 if (reg_last_set_invalid[j]
57cf50a4
GRK
11511 /* If this is a pseudo-register that was only set once and not
11512 live at the beginning of the function, it is always valid. */
663522cb 11513 || (! (regno >= FIRST_PSEUDO_REGISTER
57cf50a4 11514 && REG_N_SETS (regno) == 1
770ae6cc
RK
11515 && (! REGNO_REG_SET_P
11516 (BASIC_BLOCK (0)->global_live_at_start, regno)))
230d793d
RS
11517 && reg_last_set_label[j] > tick))
11518 {
11519 if (replace)
38a448ca 11520 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
11521 return replace;
11522 }
11523
11524 return 1;
11525 }
9a893315
JW
11526 /* If this is a memory reference, make sure that there were
11527 no stores after it that might have clobbered the value. We don't
11528 have alias info, so we assume any store invalidates it. */
11529 else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11530 && INSN_CUID (insn) <= mem_last_set)
11531 {
11532 if (replace)
38a448ca 11533 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9a893315
JW
11534 return replace;
11535 }
230d793d
RS
11536
11537 for (i = 0; i < len; i++)
11538 if ((fmt[i] == 'e'
9a893315 11539 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
230d793d
RS
11540 /* Don't bother with these. They shouldn't occur anyway. */
11541 || fmt[i] == 'E')
11542 return 0;
11543
11544 /* If we haven't found a reason for it to be invalid, it is valid. */
11545 return 1;
11546}
11547
11548/* Get the last value assigned to X, if known. Some registers
11549 in the value may be replaced with (clobber (const_int 0)) if their value
11550 is no longer known reliably. */
11551
11552static rtx
11553get_last_value (x)
11554 rtx x;
11555{
770ae6cc 11556 unsigned int regno;
230d793d
RS
11557 rtx value;
11558
11559 /* If this is a non-paradoxical SUBREG, get the value of its operand and
11560 then convert it to the desired mode. If this is a paradoxical SUBREG,
0f41302f 11561 we cannot predict what values the "extra" bits might have. */
230d793d
RS
11562 if (GET_CODE (x) == SUBREG
11563 && subreg_lowpart_p (x)
11564 && (GET_MODE_SIZE (GET_MODE (x))
11565 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11566 && (value = get_last_value (SUBREG_REG (x))) != 0)
11567 return gen_lowpart_for_combine (GET_MODE (x), value);
11568
11569 if (GET_CODE (x) != REG)
11570 return 0;
11571
11572 regno = REGNO (x);
11573 value = reg_last_set_value[regno];
11574
57cf50a4
GRK
11575 /* If we don't have a value, or if it isn't for this basic block and
11576 it's either a hard register, set more than once, or it's live
663522cb 11577 at the beginning of the function, return 0.
57cf50a4 11578
663522cb 11579 Because if it's not live at the beginning of the function then the reg
57cf50a4
GRK
11580 is always set before being used (is never used without being set).
11581 And, if it's set only once, and it's always set before use, then all
11582 uses must have the same last value, even if it's not from this basic
11583 block. */
230d793d
RS
11584
11585 if (value == 0
57cf50a4
GRK
11586 || (reg_last_set_label[regno] != label_tick
11587 && (regno < FIRST_PSEUDO_REGISTER
11588 || REG_N_SETS (regno) != 1
770ae6cc
RK
11589 || (REGNO_REG_SET_P
11590 (BASIC_BLOCK (0)->global_live_at_start, regno)))))
230d793d
RS
11591 return 0;
11592
4255220d 11593 /* If the value was set in a later insn than the ones we are processing,
ca4cd906 11594 we can't use it even if the register was only set once. */
bcd49eb7 11595 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
ca4cd906 11596 return 0;
d0ab8cd3
RK
11597
11598 /* If the value has all its registers valid, return it. */
9a893315
JW
11599 if (get_last_value_validate (&value, reg_last_set[regno],
11600 reg_last_set_label[regno], 0))
230d793d
RS
11601 return value;
11602
11603 /* Otherwise, make a copy and replace any invalid register with
11604 (clobber (const_int 0)). If that fails for some reason, return 0. */
11605
11606 value = copy_rtx (value);
9a893315
JW
11607 if (get_last_value_validate (&value, reg_last_set[regno],
11608 reg_last_set_label[regno], 1))
230d793d
RS
11609 return value;
11610
11611 return 0;
11612}
11613\f
11614/* Return nonzero if expression X refers to a REG or to memory
11615 that is set in an instruction more recent than FROM_CUID. */
11616
11617static int
11618use_crosses_set_p (x, from_cuid)
11619 register rtx x;
11620 int from_cuid;
11621{
6f7d635c 11622 register const char *fmt;
230d793d
RS
11623 register int i;
11624 register enum rtx_code code = GET_CODE (x);
11625
11626 if (code == REG)
11627 {
770ae6cc
RK
11628 unsigned int regno = REGNO (x);
11629 unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
663522cb
KH
11630 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11631
230d793d
RS
11632#ifdef PUSH_ROUNDING
11633 /* Don't allow uses of the stack pointer to be moved,
11634 because we don't know whether the move crosses a push insn. */
f73ad30e 11635 if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
230d793d
RS
11636 return 1;
11637#endif
770ae6cc 11638 for (; regno < endreg; regno++)
e28f5732
RK
11639 if (reg_last_set[regno]
11640 && INSN_CUID (reg_last_set[regno]) > from_cuid)
11641 return 1;
11642 return 0;
230d793d
RS
11643 }
11644
11645 if (code == MEM && mem_last_set > from_cuid)
11646 return 1;
11647
11648 fmt = GET_RTX_FORMAT (code);
11649
11650 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11651 {
11652 if (fmt[i] == 'E')
11653 {
11654 register int j;
11655 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11656 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11657 return 1;
11658 }
11659 else if (fmt[i] == 'e'
11660 && use_crosses_set_p (XEXP (x, i), from_cuid))
11661 return 1;
11662 }
11663 return 0;
11664}
11665\f
11666/* Define three variables used for communication between the following
11667 routines. */
11668
770ae6cc 11669static unsigned int reg_dead_regno, reg_dead_endregno;
230d793d
RS
11670static int reg_dead_flag;
11671
11672/* Function called via note_stores from reg_dead_at_p.
11673
663522cb 11674 If DEST is within [reg_dead_regno, reg_dead_endregno), set
230d793d
RS
11675 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
11676
11677static void
84832317 11678reg_dead_at_p_1 (dest, x, data)
230d793d
RS
11679 rtx dest;
11680 rtx x;
84832317 11681 void *data ATTRIBUTE_UNUSED;
230d793d 11682{
770ae6cc 11683 unsigned int regno, endregno;
230d793d
RS
11684
11685 if (GET_CODE (dest) != REG)
11686 return;
11687
11688 regno = REGNO (dest);
663522cb 11689 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
230d793d
RS
11690 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
11691
11692 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11693 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11694}
11695
11696/* Return non-zero if REG is known to be dead at INSN.
11697
11698 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
11699 referencing REG, it is dead. If we hit a SET referencing REG, it is
11700 live. Otherwise, see if it is live or dead at the start of the basic
6e25d159
RK
11701 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
11702 must be assumed to be always live. */
230d793d
RS
11703
11704static int
11705reg_dead_at_p (reg, insn)
11706 rtx reg;
11707 rtx insn;
11708{
770ae6cc
RK
11709 int block;
11710 unsigned int i;
230d793d
RS
11711
11712 /* Set variables for reg_dead_at_p_1. */
11713 reg_dead_regno = REGNO (reg);
11714 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11715 ? HARD_REGNO_NREGS (reg_dead_regno,
11716 GET_MODE (reg))
11717 : 1);
11718
11719 reg_dead_flag = 0;
11720
6e25d159
RK
11721 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
11722 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11723 {
11724 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11725 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
11726 return 0;
11727 }
11728
230d793d
RS
11729 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11730 beginning of function. */
60715d0b 11731 for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
230d793d
RS
11732 insn = prev_nonnote_insn (insn))
11733 {
84832317 11734 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
230d793d
RS
11735 if (reg_dead_flag)
11736 return reg_dead_flag == 1 ? 1 : 0;
11737
11738 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11739 return 1;
11740 }
11741
11742 /* Get the basic block number that we were in. */
11743 if (insn == 0)
11744 block = 0;
11745 else
11746 {
11747 for (block = 0; block < n_basic_blocks; block++)
3b413743 11748 if (insn == BLOCK_HEAD (block))
230d793d
RS
11749 break;
11750
11751 if (block == n_basic_blocks)
11752 return 0;
11753 }
11754
11755 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
e881bb1b 11756 if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
230d793d
RS
11757 return 0;
11758
11759 return 1;
11760}
\f
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.  */

static void
mark_used_regs_combine (x)
     rtx x;
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int endregno, r;

	  /* None of this applies to the stack, frame or arg pointers.  */
	  if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  for (r = regno; r < endregno; r++)
	    SET_HARD_REG_BIT (newpat_used_regs, r);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  */
	register rtx testreg = SET_DEST (x);

	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (GET_CODE (testreg) == MEM)
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    register int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}
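
/* Illustrative note, not part of the original sources: a typical use is to
   clear the set and then record every hard register a candidate pattern
   mentions, e.g.

     CLEAR_HARD_REG_SET (newpat_used_regs);
     mark_used_regs_combine (newpat);

   after which the TEST_HARD_REG_BIT loop in reg_dead_at_p above can veto
   combinations that would clash with those registers.  NEWPAT here stands
   for whatever pattern the caller is vetting.  */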
\f
/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (regno, insn)
     unsigned int regno;
     rtx insn;
{
  register rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    {
      REG_N_DEATHS (regno)--;
      remove_note (insn, note);
    }

  return note;
}
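
/* Illustrative sketch, not part of the original sources: a death note
   removed here is usually re-attached elsewhere, keeping the REG_N_DEATHS
   count balanced:

     rtx note = remove_death (regno, old_insn);
     if (note)
       {
	 XEXP (note, 1) = REG_NOTES (new_insn);
	 REG_NOTES (new_insn) = note;
	 REG_N_DEATHS (regno)++;
       }

   move_deaths below performs essentially this dance via *PNOTES.  */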

/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
     rtx x;
     rtx maybe_kill_insn;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register const char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && ! reg_referenced_p (x, maybe_kill_insn))
	return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);

      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  > GET_MODE_SIZE (GET_MODE (x))))
	    {
	      unsigned int deadregno = REGNO (XEXP (note, 0));
	      unsigned int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      unsigned int ourend
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      unsigned int i;

	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx_EXPR_LIST (REG_DEAD,
					 gen_rtx_REG (reg_raw_mode[i], i),
					 REG_NOTES (where_dead));
	    }

	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
			    < GET_MODE_SIZE (GET_MODE (x)))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      unsigned int ourend
		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      unsigned int i, offset;
	      rtx oldnotes = 0;

	      if (note)
		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
	      else
		offset = 1;

	      for (i = regno + offset; i < ourend; i++)
		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

	  REG_N_DEATHS (regno)++;
	}

      return;
    }
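
  /* Worked example of the cuid window above (illustrative, not from the
     original sources): with FROM_CUID == 10 and TO_INSN at cuid 20, a death
     recorded at cuid 15 satisfies 10 <= 15 < 20 and is pulled into *PNOTES
     for redistribution, while a death at cuid 25 is left where it is.  */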

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some piece of every
	 register in the expression is used by this insn, so remove any
	 old death.  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
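
/* Illustrative note, not part of the original sources: try_combine invokes
   this routine for each piece of the merged pattern, roughly

     rtx midnotes = 0;
     move_deaths (newpat, elim_i2, INSN_CUID (i2), i3, &midnotes);

   and then hands MIDNOTES to distribute_notes below; the exact argument
   choices here are paraphrased, not copied from the caller.  */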
\f
/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (x, body)
     rtx x;
     rtx body;
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      unsigned int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
	target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
	target = SUBREG_REG (XEXP (dest, 0));
      else
	return 0;

      if (GET_CODE (target) == SUBREG)
	target = SUBREG_REG (target);

      if (GET_CODE (target) != REG)
	return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
	return target == x;

      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
	return 1;

  return 0;
}
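
/* Worked example (illustrative, not from the original sources): on a target
   where SImode occupies two hard registers, X == (reg:SI 4) gives regno 4,
   endregno 6; a bit-field write to (reg:SI 5) gives tregno 5, endtregno 7.
   Then endregno > tregno and regno < endtregno both hold, so the ranges
   overlap and the function returns 1.  */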
\f
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_BR_PROB:
	case REG_EXEC_COUNT:
	  /* Doesn't matter much where we put this, as long as it's somewhere.
	     It is preferable to keep these notes on branches, which is most
	     likely to be i3.  */
	  place = i3;
	  break;

	case REG_NON_LOCAL_GOTO:
	  if (GET_CODE (i3) == JUMP_INSN)
	    place = i3;
	  else if (i2 && GET_CODE (i2) == JUMP_INSN)
	    place = i2;
	  else
	    abort ();
	  break;

	case REG_EH_REGION:
	case REG_EH_RETHROW:
	case REG_NORETURN:
	  /* These notes must remain with the call.  It should not be
	     possible for both I2 and I3 to be a call.  */
	  if (GET_CODE (i3) == CALL_INSN)
	    place = i3;
	  else if (i2 && GET_CODE (i2) == CALL_INSN)
	    place = i2;
	  else
	    abort ();
	  break;

	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if it is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes from
	     i2 or i1 for registers that were both used and clobbered, so
	     we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */

	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (GET_CODE (XEXP (note, 0)) == REG
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (GET_CODE (XEXP (note, 0)) == REG
			 ? find_regno_note (i3, REG_DEAD,
					    REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;
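
	  /* Illustrative note, not from the original sources: the
	     PUT_REG_NOTE_KIND call above converts the note in place; only
	     the kind stored in the EXPR_LIST cell changes, while
	     XEXP (note, 0) and the chaining done at the bottom of the loop
	     are reused as-is.  */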

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NOALIAS:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3 in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_LABEL:
	  /* This can show up in several ways -- either directly in the
	     pattern, or hidden off in the constant pool with (or without?)
	     a REG_EQUAL note.  */
	  /* ??? Ignore the without-reg_equal-note problem for now.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
	    place = i3;

	  if (i2
	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_NONNEG:
	case REG_WAS_0:
	  /* These notes say something about the value of a register prior
	     to the execution of an insn.  It is too much trouble to see
	     if the note is still correct in all situations.  It is better
	     to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

	  if (from_insn
	      && GET_CODE (from_insn) == CALL_INSN
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (rtx_equal_p (XEXP (note, 0), elim_i2)
	      || rtx_equal_p (XEXP (note, 0), elim_i1))
	    break;

	  if (place == 0)
	    {
	      basic_block bb = BASIC_BLOCK (this_basic_block);

	      for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
		{
		  if (! INSN_P (tem))
		    {
		      if (tem == bb->head)
			break;
		      continue;
		    }

		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  */
		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);
		      rtx inner_dest = 0;
#ifdef HAVE_cc0
		      rtx cc0_setter = NULL_RTX;
#endif

		      if (set != 0)
			for (inner_dest = SET_DEST (set);
			     (GET_CODE (inner_dest) == STRICT_LOW_PART
			      || GET_CODE (inner_dest) == SUBREG
			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
			     inner_dest = XEXP (inner_dest, 0))
			  ;

		      /* Verify that it was the set, and not a clobber that
			 modified the register.

			 CC0 targets must be careful to maintain setter/user
			 pairs.  If we cannot delete the setter due to side
			 effects, mark the user with an UNUSED note instead
			 of deleting it.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
			  )
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */

			  PATTERN (tem) = pc_rtx;

			  distribute_notes (REG_NOTES (tem), tem, tem,
					    NULL_RTX, NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  PUT_CODE (tem, NOTE);
			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (tem) = 0;

#ifdef HAVE_cc0
			  /* Delete the setter too.  */
			  if (cc0_setter)
			    {
			      PATTERN (cc0_setter) = pc_rtx;

			      distribute_notes (REG_NOTES (cc0_setter),
						cc0_setter, cc0_setter,
						NULL_RTX, NULL_RTX, NULL_RTX);
			      distribute_links (LOG_LINKS (cc0_setter));

			      PUT_CODE (cc0_setter, NOTE);
			      NOTE_LINE_NUMBER (cc0_setter)
				= NOTE_INSN_DELETED;
			      NOTE_SOURCE_FILE (cc0_setter) = 0;
			    }
#endif
			}
		      /* If the register is both set and used here, put the
			 REG_DEAD note here, but place a REG_UNUSED note
			 here too unless there already is one.  */
		      else if (reg_referenced_p (XEXP (note, 0),
						 PATTERN (tem)))
			{
			  place = tem;

			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    REG_NOTES (tem)
			      = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
						   REG_NOTES (tem));
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put one
			     here.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (GET_CODE (tem) == CALL_INSN
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place to
			 i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn
			  && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }

		  if (tem == bb->head)
		    break;
		}

	      /* We haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit the beginning
		 of the block.  If the existing life info says the reg
		 was dead, there's nothing left to do.  Otherwise, we'll
		 need to do a global life update after combine.  */
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
		  && REGNO_REG_SET_P (bb->global_live_at_start,
				      REGNO (XEXP (note, 0))))
		{
		  SET_BIT (refresh_blocks, this_basic_block);
		  need_refresh = 1;
		}
	    }
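
	  /* Illustrative note, not from the original sources: the
	     PATTERN (tem) = pc_rtx trick above is the combiner's idiom for
	     deleting an insn without unlinking it; since a pc_rtx pattern
	     uses no registers, TEM's notes and LOG_LINKS can be
	     redistributed first and the insn then demoted to a
	     NOTE_INSN_DELETED marker.  */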

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.
	     We check here whether it is set at all, not whether it is
	     totally replaced, which is what `dead_or_set_p' checks, so also
	     check for it being set partially.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      unsigned int regno = REGNO (XEXP (note, 0));

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.]  */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must arrange for an appropriate REG_DEAD
		 note to be added for it.  However, we can't just emit a USE
		 and tag the note to it, since the register might actually
		 be dead; so we recurse, and the recursive call then finds
		 the previous insn that used this register.  */

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  unsigned int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  unsigned int i;

		  for (i = regno; i < endregno; i++)
		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			 && ! find_regno_fusage (place, USE, i))
			|| dead_or_set_regno_p (place, i))
		      all_used = 0;

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 not already dead or set.  */

		      for (i = regno; i < endregno;
			   i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
			{
			  rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
			  basic_block bb = BASIC_BLOCK (this_basic_block);

			  if (! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    {
			      rtx new_note
				= gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);

			      distribute_notes (new_note, place, place,
						NULL_RTX, NULL_RTX, NULL_RTX);
			    }
			  else if (! refers_to_regno_p (i, i + 1,
							PATTERN (place), 0)
				   && ! find_regno_fusage (place, USE, i))
			    for (tem = PREV_INSN (place); ;
				 tem = PREV_INSN (tem))
			      {
				if (! INSN_P (tem))
				  {
				    if (tem == bb->head)
				      {
					SET_BIT (refresh_blocks,
						 this_basic_block);
					need_refresh = 1;
					break;
				      }
				    continue;
				  }
				if (dead_or_set_p (tem, piece)
				    || reg_bitfield_target_p (piece,
							      PATTERN (tem)))
				  {
				    REG_NOTES (tem)
				      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
							   REG_NOTES (tem));
				    break;
				  }
			      }
			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
					       REG_NOTE_KIND (note),
					       XEXP (note, 0),
					       REG_NOTES (place2));
	}
    }
}
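
/* Illustrative sketch, not part of the original sources: try_combine feeds
   each batch of notes back through the routine above, along the lines of

     distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
		       elim_i2, elim_i1);

   where I3NOTES is the REG_NOTES chain detached from the old i3; the
   spelling of the arguments is paraphrased, not copied.  */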
\f
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link to
	 have been changed by combine.  The only potential for this is if we
	 replace I3, I2, and I1 by I3 and I2.  But in that case the
	 destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && (this_basic_block == n_basic_blocks - 1
		     || BLOCK_HEAD (this_basic_block + 1) != insn));
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }
	else if (GET_CODE (insn) == CALL_INSN
		 && find_reg_fusage (insn, USE, reg))
	  {
	    place = insn;
	    break;
	  }

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;

	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
	      if (added_links_insn == 0
		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
		added_links_insn = place;
	    }
	}
    }
}
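
/* Worked example (illustrative, not from the original sources): if a
   redistributed link points at an insn setting reg R, the loop above walks
   forward from that insn; the first insn mentioning R ends the walk, but
   the link is re-homed only when reg_referenced_p confirms a genuine use.
   An insn that merely overwrites R causes the link to be dropped, which is
   the desired behaviour.  */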
\f
/* Compute INSN_CUID for INSN, which is an insn made by combine.  */

static int
insn_cuid (insn)
     rtx insn;
{
  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
	 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
    insn = NEXT_INSN (insn);

  if (INSN_UID (insn) > max_uid_cuid)
    abort ();

  return INSN_CUID (insn);
}
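
/* Illustrative note, not from the original sources: insns created by
   combine have UIDs above max_uid_cuid and hence no slot in the cuid table,
   so the loop above skips forward over the (use ...) insns combine makes
   until it reaches an original insn whose INSN_CUID is meaningful.  */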
\f
void
dump_combine_stats (file)
     FILE *file;
{
  fnotice
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fnotice
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}