]> gcc.gnu.org Git - gcc.git/blob - gcc/combine.c
1546ec64845f19f880a1112ccb9610fa6e149ad5
[gcc.git] / gcc / combine.c
1 /* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 88, 92-96, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This module is essentially the "combiner" phase of the U. of Arizona
23 Portable Optimizer, but redone to work on our list-structured
24 representation for RTL instead of their string representation.
25
26 The LOG_LINKS of each insn identify the most recent assignment
27 to each REG used in the insn. It is a list of previous insns,
28 each of which contains a SET for a REG that is used in this insn
29 and not used or set in between. LOG_LINKs never cross basic blocks.
30 They were set up by the preceding pass (lifetime analysis).
31
32 We try to combine each pair of insns joined by a logical link.
33 We also try to combine triples of insns A, B and C when
34 C has a link back to B and B has a link back to A.
35
36 LOG_LINKS does not have links for use of the CC0. They don't
37 need to, because the insn that sets the CC0 is always immediately
38 before the insn that tests it. So we always regard a branch
39 insn as having a logical link to the preceding insn. The same is true
40 for an insn explicitly using CC0.
41
42 We check (with use_crosses_set_p) to avoid combining in such a way
43 as to move a computation to a place where its value would be different.
44
45 Combination is done by mathematically substituting the previous
46 insn(s) values for the regs they set into the expressions in
47 the later insns that refer to these regs. If the result is a valid insn
48 for our target machine, according to the machine description,
49 we install it, delete the earlier insns, and update the data flow
50 information (LOG_LINKS and REG_NOTES) for what we did.
51
52 There are a few exceptions where the dataflow information created by
53 flow.c aren't completely updated:
54
55 - reg_live_length is not updated
56 - reg_n_refs is not adjusted in the rare case when a register is
57 no longer required in a computation
58 - there are extremely rare cases (see distribute_regnotes) when a
59 REG_DEAD note is lost
60 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
61 removed because there is no way to know which register it was
62 linking
63
64 To simplify substitution, we combine only when the earlier insn(s)
65 consist of only a single assignment. To simplify updating afterward,
66 we never combine when a subroutine call appears in the middle.
67
68 Since we do not represent assignments to CC0 explicitly except when that
69 is all an insn does, there is no LOG_LINKS entry in an insn that uses
70 the condition code for the insn that set the condition code.
71 Fortunately, these two insns must be consecutive.
72 Therefore, every JUMP_INSN is taken to have an implicit logical link
73 to the preceding insn. This is not quite right, since non-jumps can
74 also use the condition code; but in practice such insns would not
75 combine anyway. */
76
77 #include "config.h"
78 #ifdef __STDC__
79 #include <stdarg.h>
80 #else
81 #include <varargs.h>
82 #endif
83
84 /* Must precede rtl.h for FFS. */
85 #include <stdio.h>
86
87 #include "rtl.h"
88 #include "flags.h"
89 #include "regs.h"
90 #include "hard-reg-set.h"
91 #include "expr.h"
92 #include "basic-block.h"
93 #include "insn-config.h"
94 #include "insn-flags.h"
95 #include "insn-codes.h"
96 #include "insn-attr.h"
97 #include "recog.h"
98 #include "real.h"
99
100 /* It is not safe to use ordinary gen_lowpart in combine.
101 Use gen_lowpart_for_combine instead. See comments there. */
102 #define gen_lowpart dont_use_gen_lowpart_you_dummy
103
104 /* Number of attempts to combine instructions in this function. */
105
106 static int combine_attempts;
107
108 /* Number of attempts that got as far as substitution in this function. */
109
110 static int combine_merges;
111
112 /* Number of instructions combined with added SETs in this function. */
113
114 static int combine_extras;
115
116 /* Number of instructions combined in this function. */
117
118 static int combine_successes;
119
120 /* Totals over entire compilation. */
121
122 static int total_attempts, total_merges, total_extras, total_successes;
123
124 /* Define a default value for REVERSIBLE_CC_MODE.
125 We can never assume that a condition code mode is safe to reverse unless
126 the md tells us so. */
127 #ifndef REVERSIBLE_CC_MODE
128 #define REVERSIBLE_CC_MODE(MODE) 0
129 #endif
130 \f
131 /* Vector mapping INSN_UIDs to cuids.
132 The cuids are like uids but increase monotonically always.
133 Combine always uses cuids so that it can compare them.
134 But actually renumbering the uids, which we used to do,
135 proves to be a bad idea because it makes it hard to compare
136 the dumps produced by earlier passes with those from later passes. */
137
138 static int *uid_cuid;
139 static int max_uid_cuid;
140
141 /* Get the cuid of an insn. */
142
143 #define INSN_CUID(INSN) \
144 (INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
145
146 /* Maximum register number, which is the size of the tables below. */
147
148 static int combine_max_regno;
149
150 /* Record last point of death of (hard or pseudo) register n. */
151
152 static rtx *reg_last_death;
153
154 /* Record last point of modification of (hard or pseudo) register n. */
155
156 static rtx *reg_last_set;
157
158 /* Record the cuid of the last insn that invalidated memory
159 (anything that writes memory, and subroutine calls, but not pushes). */
160
161 static int mem_last_set;
162
163 /* Record the cuid of the last CALL_INSN
164 so we can tell whether a potential combination crosses any calls. */
165
166 static int last_call_cuid;
167
168 /* When `subst' is called, this is the insn that is being modified
169 (by combining in a previous insn). The PATTERN of this insn
170 is still the old pattern partially modified and it should not be
171 looked at, but this may be used to examine the successors of the insn
172 to judge whether a simplification is valid. */
173
174 static rtx subst_insn;
175
176 /* This is an insn that belongs before subst_insn, but is not currently
177 on the insn chain. */
178
179 static rtx subst_prev_insn;
180
181 /* This is the lowest CUID that `subst' is currently dealing with.
182 get_last_value will not return a value if the register was set at or
183 after this CUID. If not for this mechanism, we could get confused if
184 I2 or I1 in try_combine were an insn that used the old value of a register
185 to obtain a new value. In that case, we might erroneously get the
186 new value of the register when we wanted the old one. */
187
188 static int subst_low_cuid;
189
190 /* This contains any hard registers that are used in newpat; reg_dead_at_p
191 must consider all these registers to be always live. */
192
193 static HARD_REG_SET newpat_used_regs;
194
195 /* This is an insn to which a LOG_LINKS entry has been added. If this
196 insn is the earlier than I2 or I3, combine should rescan starting at
197 that location. */
198
199 static rtx added_links_insn;
200
201 /* Basic block number of the block in which we are performing combines. */
202 static int this_basic_block;
203 \f
204 /* The next group of arrays allows the recording of the last value assigned
205 to (hard or pseudo) register n. We use this information to see if a
206 operation being processed is redundant given a prior operation performed
207 on the register. For example, an `and' with a constant is redundant if
208 all the zero bits are already known to be turned off.
209
210 We use an approach similar to that used by cse, but change it in the
211 following ways:
212
213 (1) We do not want to reinitialize at each label.
214 (2) It is useful, but not critical, to know the actual value assigned
215 to a register. Often just its form is helpful.
216
217 Therefore, we maintain the following arrays:
218
219 reg_last_set_value the last value assigned
220 reg_last_set_label records the value of label_tick when the
221 register was assigned
222 reg_last_set_table_tick records the value of label_tick when a
223 value using the register is assigned
224 reg_last_set_invalid set to non-zero when it is not valid
225 to use the value of this register in some
226 register's value
227
228 To understand the usage of these tables, it is important to understand
229 the distinction between the value in reg_last_set_value being valid
230 and the register being validly contained in some other expression in the
231 table.
232
233 Entry I in reg_last_set_value is valid if it is non-zero, and either
234 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
235
236 Register I may validly appear in any expression returned for the value
237 of another register if reg_n_sets[i] is 1. It may also appear in the
238 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
239 reg_last_set_invalid[j] is zero.
240
241 If an expression is found in the table containing a register which may
242 not validly appear in an expression, the register is replaced by
243 something that won't match, (clobber (const_int 0)).
244
245 reg_last_set_invalid[i] is set non-zero when register I is being assigned
246 to and reg_last_set_table_tick[i] == label_tick. */
247
248 /* Record last value assigned to (hard or pseudo) register n. */
249
250 static rtx *reg_last_set_value;
251
252 /* Record the value of label_tick when the value for register n is placed in
253 reg_last_set_value[n]. */
254
255 static int *reg_last_set_label;
256
257 /* Record the value of label_tick when an expression involving register n
258 is placed in reg_last_set_value. */
259
260 static int *reg_last_set_table_tick;
261
262 /* Set non-zero if references to register n in expressions should not be
263 used. */
264
265 static char *reg_last_set_invalid;
266
267 /* Incremented for each label. */
268
269 static int label_tick;
270
271 /* Some registers that are set more than once and used in more than one
272 basic block are nevertheless always set in similar ways. For example,
273 a QImode register may be loaded from memory in two places on a machine
274 where byte loads zero extend.
275
276 We record in the following array what we know about the nonzero
277 bits of a register, specifically which bits are known to be zero.
278
279 If an entry is zero, it means that we don't know anything special. */
280
281 static unsigned HOST_WIDE_INT *reg_nonzero_bits;
282
283 /* Mode used to compute significance in reg_nonzero_bits. It is the largest
284 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
285
286 static enum machine_mode nonzero_bits_mode;
287
288 /* Nonzero if we know that a register has some leading bits that are always
289 equal to the sign bit. */
290
291 static char *reg_sign_bit_copies;
292
293 /* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
294 It is zero while computing them and after combine has completed. This
295 former test prevents propagating values based on previously set values,
296 which can be incorrect if a variable is modified in a loop. */
297
298 static int nonzero_sign_valid;
299
300 /* These arrays are maintained in parallel with reg_last_set_value
301 and are used to store the mode in which the register was last set,
302 the bits that were known to be zero when it was last set, and the
303 number of sign bits copies it was known to have when it was last set. */
304
305 static enum machine_mode *reg_last_set_mode;
306 static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
307 static char *reg_last_set_sign_bit_copies;
308 \f
309 /* Record one modification to rtl structure
310 to be undone by storing old_contents into *where.
311 is_int is 1 if the contents are an int. */
312
313 struct undo
314 {
315 struct undo *next;
316 int is_int;
317 union {rtx r; int i;} old_contents;
318 union {rtx *r; int *i;} where;
319 };
320
321 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
322 num_undo says how many are currently recorded.
323
324 storage is nonzero if we must undo the allocation of new storage.
325 The value of storage is what to pass to obfree.
326
327 other_insn is nonzero if we have modified some other insn in the process
328 of working on subst_insn. It must be verified too.
329
330 previous_undos is the value of undobuf.undos when we started processing
331 this substitution. This will prevent gen_rtx_combine from re-used a piece
332 from the previous expression. Doing so can produce circular rtl
333 structures. */
334
335 struct undobuf
336 {
337 char *storage;
338 struct undo *undos;
339 struct undo *frees;
340 struct undo *previous_undos;
341 rtx other_insn;
342 };
343
344 static struct undobuf undobuf;
345
346 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
347 insn. The substitution can be undone by undo_all. If INTO is already
348 set to NEWVAL, do not record this change. Because computing NEWVAL might
349 also call SUBST, we have to compute it before we put anything into
350 the undo table. */
351
352 #define SUBST(INTO, NEWVAL) \
353 do { rtx _new = (NEWVAL); \
354 struct undo *_buf; \
355 \
356 if (undobuf.frees) \
357 _buf = undobuf.frees, undobuf.frees = _buf->next; \
358 else \
359 _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
360 \
361 _buf->is_int = 0; \
362 _buf->where.r = &INTO; \
363 _buf->old_contents.r = INTO; \
364 INTO = _new; \
365 if (_buf->old_contents.r == INTO) \
366 _buf->next = undobuf.frees, undobuf.frees = _buf; \
367 else \
368 _buf->next = undobuf.undos, undobuf.undos = _buf; \
369 } while (0)
370
371 /* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
372 for the value of a HOST_WIDE_INT value (including CONST_INT) is
373 not safe. */
374
375 #define SUBST_INT(INTO, NEWVAL) \
376 do { struct undo *_buf; \
377 \
378 if (undobuf.frees) \
379 _buf = undobuf.frees, undobuf.frees = _buf->next; \
380 else \
381 _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
382 \
383 _buf->is_int = 1; \
384 _buf->where.i = (int *) &INTO; \
385 _buf->old_contents.i = INTO; \
386 INTO = NEWVAL; \
387 if (_buf->old_contents.i == INTO) \
388 _buf->next = undobuf.frees, undobuf.frees = _buf; \
389 else \
390 _buf->next = undobuf.undos, undobuf.undos = _buf; \
391 } while (0)
392
393 /* Number of times the pseudo being substituted for
394 was found and replaced. */
395
396 static int n_occurrences;
397
398 static void init_reg_last_arrays PROTO((void));
399 static void setup_incoming_promotions PROTO((void));
400 static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
401 static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
402 static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
403 static rtx try_combine PROTO((rtx, rtx, rtx));
404 static void undo_all PROTO((void));
405 static rtx *find_split_point PROTO((rtx *, rtx));
406 static rtx subst PROTO((rtx, rtx, rtx, int, int));
407 static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
408 static rtx simplify_if_then_else PROTO((rtx));
409 static rtx simplify_set PROTO((rtx));
410 static rtx simplify_logical PROTO((rtx, int));
411 static rtx expand_compound_operation PROTO((rtx));
412 static rtx expand_field_assignment PROTO((rtx));
413 static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
414 int, int, int));
415 static rtx extract_left_shift PROTO((rtx, int));
416 static rtx make_compound_operation PROTO((rtx, enum rtx_code));
417 static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
418 static rtx force_to_mode PROTO((rtx, enum machine_mode,
419 unsigned HOST_WIDE_INT, rtx, int));
420 static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
421 static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
422 static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
423 static rtx make_field_assignment PROTO((rtx));
424 static rtx apply_distributive_law PROTO((rtx));
425 static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
426 unsigned HOST_WIDE_INT));
427 static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
428 static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
429 static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
430 enum rtx_code, HOST_WIDE_INT,
431 enum machine_mode, int *));
432 static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
433 rtx, int));
434 static int recog_for_combine PROTO((rtx *, rtx, rtx *, int *));
435 static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
436 static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
437 ...));
438 static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
439 rtx, rtx));
440 static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
441 enum machine_mode, rtx));
442 static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
443 static int reversible_comparison_p PROTO((rtx));
444 static void update_table_tick PROTO((rtx));
445 static void record_value_for_reg PROTO((rtx, rtx, rtx));
446 static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
447 static void record_dead_and_set_regs PROTO((rtx));
448 static int get_last_value_validate PROTO((rtx *, rtx, int, int));
449 static rtx get_last_value PROTO((rtx));
450 static int use_crosses_set_p PROTO((rtx, int));
451 static void reg_dead_at_p_1 PROTO((rtx, rtx));
452 static int reg_dead_at_p PROTO((rtx, rtx));
453 static void move_deaths PROTO((rtx, rtx, int, rtx, rtx *));
454 static int reg_bitfield_target_p PROTO((rtx, rtx));
455 static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
456 static void distribute_links PROTO((rtx));
457 static void mark_used_regs_combine PROTO((rtx));
458 static int insn_cuid PROTO((rtx));
459 \f
460 /* Main entry point for combiner. F is the first insn of the function.
461 NREGS is the first unused pseudo-reg number. */
462
463 void
464 combine_instructions (f, nregs)
465 rtx f;
466 int nregs;
467 {
468 register rtx insn, next, prev;
469 register int i;
470 register rtx links, nextlinks;
471
472 combine_attempts = 0;
473 combine_merges = 0;
474 combine_extras = 0;
475 combine_successes = 0;
476 undobuf.undos = undobuf.previous_undos = 0;
477
478 combine_max_regno = nregs;
479
480 reg_nonzero_bits
481 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
482 reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));
483
484 bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
485 bzero (reg_sign_bit_copies, nregs * sizeof (char));
486
487 reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
488 reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
489 reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
490 reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
491 reg_last_set_label = (int *) alloca (nregs * sizeof (int));
492 reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
493 reg_last_set_mode
494 = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
495 reg_last_set_nonzero_bits
496 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
497 reg_last_set_sign_bit_copies
498 = (char *) alloca (nregs * sizeof (char));
499
500 init_reg_last_arrays ();
501
502 init_recog_no_volatile ();
503
504 /* Compute maximum uid value so uid_cuid can be allocated. */
505
506 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
507 if (INSN_UID (insn) > i)
508 i = INSN_UID (insn);
509
510 uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
511 max_uid_cuid = i;
512
513 nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
514
515 /* Don't use reg_nonzero_bits when computing it. This can cause problems
516 when, for example, we have j <<= 1 in a loop. */
517
518 nonzero_sign_valid = 0;
519
520 /* Compute the mapping from uids to cuids.
521 Cuids are numbers assigned to insns, like uids,
522 except that cuids increase monotonically through the code.
523
524 Scan all SETs and see if we can deduce anything about what
525 bits are known to be zero for some registers and how many copies
526 of the sign bit are known to exist for those registers.
527
528 Also set any known values so that we can use it while searching
529 for what bits are known to be set. */
530
531 label_tick = 1;
532
533 /* We need to initialize it here, because record_dead_and_set_regs may call
534 get_last_value. */
535 subst_prev_insn = NULL_RTX;
536
537 setup_incoming_promotions ();
538
539 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
540 {
541 uid_cuid[INSN_UID (insn)] = ++i;
542 subst_low_cuid = i;
543 subst_insn = insn;
544
545 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
546 {
547 note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
548 record_dead_and_set_regs (insn);
549
550 #ifdef AUTO_INC_DEC
551 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
552 if (REG_NOTE_KIND (links) == REG_INC)
553 set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
554 #endif
555 }
556
557 if (GET_CODE (insn) == CODE_LABEL)
558 label_tick++;
559 }
560
561 nonzero_sign_valid = 1;
562
563 /* Now scan all the insns in forward order. */
564
565 this_basic_block = -1;
566 label_tick = 1;
567 last_call_cuid = 0;
568 mem_last_set = 0;
569 init_reg_last_arrays ();
570 setup_incoming_promotions ();
571
572 for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
573 {
574 next = 0;
575
576 /* If INSN starts a new basic block, update our basic block number. */
577 if (this_basic_block + 1 < n_basic_blocks
578 && basic_block_head[this_basic_block + 1] == insn)
579 this_basic_block++;
580
581 if (GET_CODE (insn) == CODE_LABEL)
582 label_tick++;
583
584 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
585 {
586 /* Try this insn with each insn it links back to. */
587
588 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
589 if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
590 goto retry;
591
592 /* Try each sequence of three linked insns ending with this one. */
593
594 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
595 for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
596 nextlinks = XEXP (nextlinks, 1))
597 if ((next = try_combine (insn, XEXP (links, 0),
598 XEXP (nextlinks, 0))) != 0)
599 goto retry;
600
601 #ifdef HAVE_cc0
602 /* Try to combine a jump insn that uses CC0
603 with a preceding insn that sets CC0, and maybe with its
604 logical predecessor as well.
605 This is how we make decrement-and-branch insns.
606 We need this special code because data flow connections
607 via CC0 do not get entered in LOG_LINKS. */
608
609 if (GET_CODE (insn) == JUMP_INSN
610 && (prev = prev_nonnote_insn (insn)) != 0
611 && GET_CODE (prev) == INSN
612 && sets_cc0_p (PATTERN (prev)))
613 {
614 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
615 goto retry;
616
617 for (nextlinks = LOG_LINKS (prev); nextlinks;
618 nextlinks = XEXP (nextlinks, 1))
619 if ((next = try_combine (insn, prev,
620 XEXP (nextlinks, 0))) != 0)
621 goto retry;
622 }
623
624 /* Do the same for an insn that explicitly references CC0. */
625 if (GET_CODE (insn) == INSN
626 && (prev = prev_nonnote_insn (insn)) != 0
627 && GET_CODE (prev) == INSN
628 && sets_cc0_p (PATTERN (prev))
629 && GET_CODE (PATTERN (insn)) == SET
630 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
631 {
632 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
633 goto retry;
634
635 for (nextlinks = LOG_LINKS (prev); nextlinks;
636 nextlinks = XEXP (nextlinks, 1))
637 if ((next = try_combine (insn, prev,
638 XEXP (nextlinks, 0))) != 0)
639 goto retry;
640 }
641
642 /* Finally, see if any of the insns that this insn links to
643 explicitly references CC0. If so, try this insn, that insn,
644 and its predecessor if it sets CC0. */
645 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
646 if (GET_CODE (XEXP (links, 0)) == INSN
647 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
648 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
649 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
650 && GET_CODE (prev) == INSN
651 && sets_cc0_p (PATTERN (prev))
652 && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
653 goto retry;
654 #endif
655
656 /* Try combining an insn with two different insns whose results it
657 uses. */
658 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
659 for (nextlinks = XEXP (links, 1); nextlinks;
660 nextlinks = XEXP (nextlinks, 1))
661 if ((next = try_combine (insn, XEXP (links, 0),
662 XEXP (nextlinks, 0))) != 0)
663 goto retry;
664
665 if (GET_CODE (insn) != NOTE)
666 record_dead_and_set_regs (insn);
667
668 retry:
669 ;
670 }
671 }
672
673 total_attempts += combine_attempts;
674 total_merges += combine_merges;
675 total_extras += combine_extras;
676 total_successes += combine_successes;
677
678 nonzero_sign_valid = 0;
679 }
680
681 /* Wipe the reg_last_xxx arrays in preparation for another pass. */
682
683 static void
684 init_reg_last_arrays ()
685 {
686 int nregs = combine_max_regno;
687
688 bzero ((char *) reg_last_death, nregs * sizeof (rtx));
689 bzero ((char *) reg_last_set, nregs * sizeof (rtx));
690 bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
691 bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
692 bzero ((char *) reg_last_set_label, nregs * sizeof (int));
693 bzero (reg_last_set_invalid, nregs * sizeof (char));
694 bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
695 bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
696 bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
697 }
698 \f
699 /* Set up any promoted values for incoming argument registers. */
700
701 static void
702 setup_incoming_promotions ()
703 {
704 #ifdef PROMOTE_FUNCTION_ARGS
705 int regno;
706 rtx reg;
707 enum machine_mode mode;
708 int unsignedp;
709 rtx first = get_insns ();
710
711 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
712 if (FUNCTION_ARG_REGNO_P (regno)
713 && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
714 record_value_for_reg (reg, first,
715 gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
716 GET_MODE (reg),
717 gen_rtx (CLOBBER, mode, const0_rtx)));
718 #endif
719 }
720 \f
721 /* Called via note_stores. If X is a pseudo that is narrower than
722 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
723
724 If we are setting only a portion of X and we can't figure out what
725 portion, assume all bits will be used since we don't know what will
726 be happening.
727
728 Similarly, set how many bits of X are known to be copies of the sign bit
729 at all locations in the function. This is the smallest number implied
730 by any set of X. */
731
732 static void
733 set_nonzero_bits_and_sign_copies (x, set)
734 rtx x;
735 rtx set;
736 {
737 int num;
738
739 if (GET_CODE (x) == REG
740 && REGNO (x) >= FIRST_PSEUDO_REGISTER
741 /* If this register is undefined at the start of the file, we can't
742 say what its contents were. */
743 && ! REGNO_REG_SET_P (basic_block_live_at_start[0], REGNO (x))
744 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
745 {
746 if (set == 0 || GET_CODE (set) == CLOBBER)
747 {
748 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
749 reg_sign_bit_copies[REGNO (x)] = 1;
750 return;
751 }
752
753 /* If this is a complex assignment, see if we can convert it into a
754 simple assignment. */
755 set = expand_field_assignment (set);
756
757 /* If this is a simple assignment, or we have a paradoxical SUBREG,
758 set what we know about X. */
759
760 if (SET_DEST (set) == x
761 || (GET_CODE (SET_DEST (set)) == SUBREG
762 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
763 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
764 && SUBREG_REG (SET_DEST (set)) == x))
765 {
766 rtx src = SET_SRC (set);
767
768 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
769 /* If X is narrower than a word and SRC is a non-negative
770 constant that would appear negative in the mode of X,
771 sign-extend it for use in reg_nonzero_bits because some
772 machines (maybe most) will actually do the sign-extension
773 and this is the conservative approach.
774
775 ??? For 2.5, try to tighten up the MD files in this regard
776 instead of this kludge. */
777
778 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
779 && GET_CODE (src) == CONST_INT
780 && INTVAL (src) > 0
781 && 0 != (INTVAL (src)
782 & ((HOST_WIDE_INT) 1
783 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
784 src = GEN_INT (INTVAL (src)
785 | ((HOST_WIDE_INT) (-1)
786 << GET_MODE_BITSIZE (GET_MODE (x))));
787 #endif
788
789 reg_nonzero_bits[REGNO (x)]
790 |= nonzero_bits (src, nonzero_bits_mode);
791 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
792 if (reg_sign_bit_copies[REGNO (x)] == 0
793 || reg_sign_bit_copies[REGNO (x)] > num)
794 reg_sign_bit_copies[REGNO (x)] = num;
795 }
796 else
797 {
798 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
799 reg_sign_bit_copies[REGNO (x)] = 1;
800 }
801 }
802 }
803 \f
804 /* See if INSN can be combined into I3. PRED and SUCC are optionally
805 insns that were previously combined into I3 or that will be combined
806 into the merger of INSN and I3.
807
808 Return 0 if the combination is not allowed for any reason.
809
810 If the combination is allowed, *PDEST will be set to the single
811 destination of INSN and *PSRC to the single source, and this function
812 will return 1. */
813
814 static int
815 can_combine_p (insn, i3, pred, succ, pdest, psrc)
816 rtx insn;
817 rtx i3;
818 rtx pred, succ;
819 rtx *pdest, *psrc;
820 {
821 int i;
822 rtx set = 0, src, dest;
823 rtx p, link;
824 int all_adjacent = (succ ? (next_active_insn (insn) == succ
825 && next_active_insn (succ) == i3)
826 : next_active_insn (insn) == i3);
827
828 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
829 or a PARALLEL consisting of such a SET and CLOBBERs.
830
831 If INSN has CLOBBER parallel parts, ignore them for our processing.
832 By definition, these happen during the execution of the insn. When it
833 is merged with another insn, all bets are off. If they are, in fact,
834 needed and aren't also supplied in I3, they may be added by
835 recog_for_combine. Otherwise, it won't match.
836
837 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
838 note.
839
840 Get the source and destination of INSN. If more than one, can't
841 combine. */
842
843 if (GET_CODE (PATTERN (insn)) == SET)
844 set = PATTERN (insn);
845 else if (GET_CODE (PATTERN (insn)) == PARALLEL
846 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
847 {
848 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
849 {
850 rtx elt = XVECEXP (PATTERN (insn), 0, i);
851
852 switch (GET_CODE (elt))
853 {
854 /* We can ignore CLOBBERs. */
855 case CLOBBER:
856 break;
857
858 case SET:
859 /* Ignore SETs whose result isn't used but not those that
860 have side-effects. */
861 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
862 && ! side_effects_p (elt))
863 break;
864
865 /* If we have already found a SET, this is a second one and
866 so we cannot combine with this insn. */
867 if (set)
868 return 0;
869
870 set = elt;
871 break;
872
873 default:
874 /* Anything else means we can't combine. */
875 return 0;
876 }
877 }
878
879 if (set == 0
880 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
881 so don't do anything with it. */
882 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
883 return 0;
884 }
885 else
886 return 0;
887
888 if (set == 0)
889 return 0;
890
891 set = expand_field_assignment (set);
892 src = SET_SRC (set), dest = SET_DEST (set);
893
894 /* Don't eliminate a store in the stack pointer. */
895 if (dest == stack_pointer_rtx
896 /* If we couldn't eliminate a field assignment, we can't combine. */
897 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
898 /* Don't combine with an insn that sets a register to itself if it has
899 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
900 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
901 /* Can't merge a function call. */
902 || GET_CODE (src) == CALL
903 /* Don't eliminate a function call argument. */
904 || (GET_CODE (i3) == CALL_INSN
905 && (find_reg_fusage (i3, USE, dest)
906 || (GET_CODE (dest) == REG
907 && REGNO (dest) < FIRST_PSEUDO_REGISTER
908 && global_regs[REGNO (dest)])))
909 /* Don't substitute into an incremented register. */
910 || FIND_REG_INC_NOTE (i3, dest)
911 || (succ && FIND_REG_INC_NOTE (succ, dest))
912 /* Don't combine the end of a libcall into anything. */
913 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
914 /* Make sure that DEST is not used after SUCC but before I3. */
915 || (succ && ! all_adjacent
916 && reg_used_between_p (dest, succ, i3))
917 /* Make sure that the value that is to be substituted for the register
918 does not use any registers whose values alter in between. However,
919 If the insns are adjacent, a use can't cross a set even though we
920 think it might (this can happen for a sequence of insns each setting
921 the same destination; reg_last_set of that register might point to
922 a NOTE). If INSN has a REG_EQUIV note, the register is always
923 equivalent to the memory so the substitution is valid even if there
924 are intervening stores. Also, don't move a volatile asm or
925 UNSPEC_VOLATILE across any other insns. */
926 || (! all_adjacent
927 && (((GET_CODE (src) != MEM
928 || ! find_reg_note (insn, REG_EQUIV, src))
929 && use_crosses_set_p (src, INSN_CUID (insn)))
930 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
931 || GET_CODE (src) == UNSPEC_VOLATILE))
932 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
933 better register allocation by not doing the combine. */
934 || find_reg_note (i3, REG_NO_CONFLICT, dest)
935 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
936 /* Don't combine across a CALL_INSN, because that would possibly
937 change whether the life span of some REGs crosses calls or not,
938 and it is a pain to update that information.
939 Exception: if source is a constant, moving it later can't hurt.
940 Accept that special case, because it helps -fforce-addr a lot. */
941 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
942 return 0;
943
944 /* DEST must either be a REG or CC0. */
945 if (GET_CODE (dest) == REG)
946 {
947 /* If register alignment is being enforced for multi-word items in all
948 cases except for parameters, it is possible to have a register copy
949 insn referencing a hard register that is not allowed to contain the
950 mode being copied and which would not be valid as an operand of most
951 insns. Eliminate this problem by not combining with such an insn.
952
953 Also, on some machines we don't want to extend the life of a hard
954 register.
955
956 This is the same test done in can_combine except that we don't test
957 if SRC is a CALL operation to permit a hard register with
958 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
959 into account. */
960
961 if (GET_CODE (src) == REG
962 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
963 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
964 /* Don't extend the life of a hard register unless it is
965 user variable (if we have few registers) or it can't
966 fit into the desired register (meaning something special
967 is going on).
968 Also avoid substituting a return register into I3, because
969 reload can't handle a conflict with constraints of other
970 inputs. */
971 || (REGNO (src) < FIRST_PSEUDO_REGISTER
972 && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
973 || (SMALL_REGISTER_CLASSES
974 && ((! all_adjacent && ! REG_USERVAR_P (src))
975 || (FUNCTION_VALUE_REGNO_P (REGNO (src))
976 && ! REG_USERVAR_P (src))))))))
977 return 0;
978 }
979 else if (GET_CODE (dest) != CC0)
980 return 0;
981
982 /* Don't substitute for a register intended as a clobberable operand.
983 Similarly, don't substitute an expression containing a register that
984 will be clobbered in I3. */
985 if (GET_CODE (PATTERN (i3)) == PARALLEL)
986 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
987 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
988 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
989 src)
990 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
991 return 0;
992
993 /* If INSN contains anything volatile, or is an `asm' (whether volatile
994 or not), reject, unless nothing volatile comes between it and I3,
995 with the exception of SUCC. */
996
997 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
998 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
999 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1000 && p != succ && volatile_refs_p (PATTERN (p)))
1001 return 0;
1002
1003 /* If INSN is an asm, and DEST is a hard register, reject, since it has
1004 to be an explicit register variable, and was chosen for a reason. */
1005
1006 if (GET_CODE (src) == ASM_OPERANDS
1007 && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1008 return 0;
1009
1010 /* If there are any volatile insns between INSN and I3, reject, because
1011 they might affect machine state. */
1012
1013 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1014 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1015 && p != succ && volatile_insn_p (PATTERN (p)))
1016 return 0;
1017
1018 /* If INSN or I2 contains an autoincrement or autodecrement,
1019 make sure that register is not used between there and I3,
1020 and not already used in I3 either.
1021 Also insist that I3 not be a jump; if it were one
1022 and the incremented register were spilled, we would lose. */
1023
1024 #ifdef AUTO_INC_DEC
1025 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1026 if (REG_NOTE_KIND (link) == REG_INC
1027 && (GET_CODE (i3) == JUMP_INSN
1028 || reg_used_between_p (XEXP (link, 0), insn, i3)
1029 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1030 return 0;
1031 #endif
1032
1033 #ifdef HAVE_cc0
1034 /* Don't combine an insn that follows a CC0-setting insn.
1035 An insn that uses CC0 must not be separated from the one that sets it.
1036 We do, however, allow I2 to follow a CC0-setting insn if that insn
1037 is passed as I1; in that case it will be deleted also.
1038 We also allow combining in this case if all the insns are adjacent
1039 because that would leave the two CC0 insns adjacent as well.
1040 It would be more logical to test whether CC0 occurs inside I1 or I2,
1041 but that would be much slower, and this ought to be equivalent. */
1042
1043 p = prev_nonnote_insn (insn);
1044 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
1045 && ! all_adjacent)
1046 return 0;
1047 #endif
1048
1049 /* If we get here, we have passed all the tests and the combination is
1050 to be allowed. */
1051
1052 *pdest = dest;
1053 *psrc = src;
1054
1055 return 1;
1056 }
1057 \f
1058 /* Check if PAT is an insn - or a part of it - used to set up an
1059 argument for a function in a hard register. */
1060
1061 static int
1062 sets_function_arg_p (pat)
1063 rtx pat;
1064 {
1065 int i;
1066 rtx inner_dest;
1067
1068 switch (GET_CODE (pat))
1069 {
1070 case INSN:
1071 return sets_function_arg_p (PATTERN (pat));
1072
1073 case PARALLEL:
1074 for (i = XVECLEN (pat, 0); --i >= 0;)
1075 if (sets_function_arg_p (XVECEXP (pat, 0, i)))
1076 return 1;
1077
1078 break;
1079
1080 case SET:
1081 inner_dest = SET_DEST (pat);
1082 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1083 || GET_CODE (inner_dest) == SUBREG
1084 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1085 inner_dest = XEXP (inner_dest, 0);
1086
1087 return (GET_CODE (inner_dest) == REG
1088 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1089 && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));
1090
1091 default:
1092 break;
1093 }
1094
1095 return 0;
1096 }
1097
1098 /* LOC is the location within I3 that contains its pattern or the component
1099 of a PARALLEL of the pattern. We validate that it is valid for combining.
1100
1101 One problem is if I3 modifies its output, as opposed to replacing it
1102 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1103 so would produce an insn that is not equivalent to the original insns.
1104
1105 Consider:
1106
1107 (set (reg:DI 101) (reg:DI 100))
1108 (set (subreg:SI (reg:DI 101) 0) <foo>)
1109
1110 This is NOT equivalent to:
1111
1112 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1113 (set (reg:DI 101) (reg:DI 100))])
1114
1115 Not only does this modify 100 (in which case it might still be valid
1116 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1117
1118 We can also run into a problem if I2 sets a register that I1
1119 uses and I1 gets directly substituted into I3 (not via I2). In that
1120 case, we would be getting the wrong value of I2DEST into I3, so we
1121 must reject the combination. This case occurs when I2 and I1 both
1122 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1123 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1124 of a SET must prevent combination from occurring.
1125
1126 On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
1127 if the destination of a SET is a hard register that isn't a user
1128 variable.
1129
1130 Before doing the above check, we first try to expand a field assignment
1131 into a set of logical operations.
1132
1133 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1134 we place a register that is both set and used within I3. If more than one
1135 such register is detected, we fail.
1136
1137 Return 1 if the combination is valid, zero otherwise. */
1138
1139 static int
1140 combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1141 rtx i3;
1142 rtx *loc;
1143 rtx i2dest;
1144 rtx i1dest;
1145 int i1_not_in_src;
1146 rtx *pi3dest_killed;
1147 {
1148 rtx x = *loc;
1149
1150 if (GET_CODE (x) == SET)
1151 {
1152 rtx set = expand_field_assignment (x);
1153 rtx dest = SET_DEST (set);
1154 rtx src = SET_SRC (set);
1155 rtx inner_dest = dest, inner_src = src;
1156
1157 SUBST (*loc, set);
1158
1159 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1160 || GET_CODE (inner_dest) == SUBREG
1161 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1162 inner_dest = XEXP (inner_dest, 0);
1163
1164 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1165 was added. */
1166 #if 0
1167 while (GET_CODE (inner_src) == STRICT_LOW_PART
1168 || GET_CODE (inner_src) == SUBREG
1169 || GET_CODE (inner_src) == ZERO_EXTRACT)
1170 inner_src = XEXP (inner_src, 0);
1171
1172 /* If it is better that two different modes keep two different pseudos,
1173 avoid combining them. This avoids producing the following pattern
1174 on a 386:
1175 (set (subreg:SI (reg/v:QI 21) 0)
1176 (lshiftrt:SI (reg/v:SI 20)
1177 (const_int 24)))
1178 If that were made, reload could not handle the pair of
1179 reg 20/21, since it would try to get any GENERAL_REGS
1180 but some of them don't handle QImode. */
1181
1182 if (rtx_equal_p (inner_src, i2dest)
1183 && GET_CODE (inner_dest) == REG
1184 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1185 return 0;
1186 #endif
1187
1188 /* Check for the case where I3 modifies its output, as
1189 discussed above. */
1190 if ((inner_dest != dest
1191 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1192 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1193
1194 /* This is the same test done in can_combine_p except that we
1195 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
1196 CALL operation. Moreover, we can't test all_adjacent; we don't
1197 have to, since this instruction will stay in place, thus we are
1198 not considering increasing the lifetime of INNER_DEST.
1199
1200 Also, if this insn sets a function argument, combining it with
1201 something that might need a spill could clobber a previous
1202 function argument; the all_adjacent test in can_combine_p also
1203 checks this; here, we do a more specific test for this case. */
1204
1205 || (GET_CODE (inner_dest) == REG
1206 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1207 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1208 GET_MODE (inner_dest))
1209 || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
1210 && ! REG_USERVAR_P (inner_dest)
1211 && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
1212 || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
1213 && i3 != 0
1214 && sets_function_arg_p (prev_nonnote_insn (i3)))))))
1215 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1216 return 0;
1217
1218 /* If DEST is used in I3, it is being killed in this insn,
1219 so record that for later.
1220 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1221 STACK_POINTER_REGNUM, since these are always considered to be
1222 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
1223 if (pi3dest_killed && GET_CODE (dest) == REG
1224 && reg_referenced_p (dest, PATTERN (i3))
1225 && REGNO (dest) != FRAME_POINTER_REGNUM
1226 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1227 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1228 #endif
1229 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1230 && (REGNO (dest) != ARG_POINTER_REGNUM
1231 || ! fixed_regs [REGNO (dest)])
1232 #endif
1233 && REGNO (dest) != STACK_POINTER_REGNUM)
1234 {
1235 if (*pi3dest_killed)
1236 return 0;
1237
1238 *pi3dest_killed = dest;
1239 }
1240 }
1241
1242 else if (GET_CODE (x) == PARALLEL)
1243 {
1244 int i;
1245
1246 for (i = 0; i < XVECLEN (x, 0); i++)
1247 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1248 i1_not_in_src, pi3dest_killed))
1249 return 0;
1250 }
1251
1252 return 1;
1253 }
1254 \f
1255 /* Try to combine the insns I1 and I2 into I3.
1256 Here I1 and I2 appear earlier than I3.
1257 I1 can be zero; then we combine just I2 into I3.
1258
1259 It we are combining three insns and the resulting insn is not recognized,
1260 try splitting it into two insns. If that happens, I2 and I3 are retained
1261 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1262 are pseudo-deleted.
1263
1264 Return 0 if the combination does not work. Then nothing is changed.
1265 If we did the combination, return the insn at which combine should
1266 resume scanning. */
1267
1268 static rtx
1269 try_combine (i3, i2, i1)
1270 register rtx i3, i2, i1;
1271 {
1272 /* New patterns for I3 and I3, respectively. */
1273 rtx newpat, newi2pat = 0;
1274 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1275 int added_sets_1, added_sets_2;
1276 /* Total number of SETs to put into I3. */
1277 int total_sets;
1278 /* Nonzero is I2's body now appears in I3. */
1279 int i2_is_used;
1280 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1281 int insn_code_number, i2_code_number, other_code_number;
1282 /* Contains I3 if the destination of I3 is used in its source, which means
1283 that the old life of I3 is being killed. If that usage is placed into
1284 I2 and not in I3, a REG_DEAD note must be made. */
1285 rtx i3dest_killed = 0;
1286 /* SET_DEST and SET_SRC of I2 and I1. */
1287 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1288 /* PATTERN (I2), or a copy of it in certain cases. */
1289 rtx i2pat;
1290 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
1291 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1292 int i1_feeds_i3 = 0;
1293 /* Notes that must be added to REG_NOTES in I3 and I2. */
1294 rtx new_i3_notes, new_i2_notes;
1295 /* Notes that we substituted I3 into I2 instead of the normal case. */
1296 int i3_subst_into_i2 = 0;
1297 /* Notes that I1, I2 or I3 is a MULT operation. */
1298 int have_mult = 0;
1299 /* Number of clobbers of SCRATCH we had to add. */
1300 int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;
1301
1302 int maxreg;
1303 rtx temp;
1304 register rtx link;
1305 int i;
1306
1307 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1308 This can occur when flow deletes an insn that it has merged into an
1309 auto-increment address. We also can't do anything if I3 has a
1310 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1311 libcall. */
1312
1313 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1314 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1315 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
1316 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
1317 return 0;
1318
1319 combine_attempts++;
1320
1321 undobuf.undos = undobuf.previous_undos = 0;
1322 undobuf.other_insn = 0;
1323
1324 /* Save the current high-water-mark so we can free storage if we didn't
1325 accept this combination. */
1326 undobuf.storage = (char *) oballoc (0);
1327
1328 /* Reset the hard register usage information. */
1329 CLEAR_HARD_REG_SET (newpat_used_regs);
1330
1331 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1332 code below, set I1 to be the earlier of the two insns. */
1333 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1334 temp = i1, i1 = i2, i2 = temp;
1335
1336 added_links_insn = 0;
1337
1338 /* First check for one important special-case that the code below will
1339 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1340 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1341 we may be able to replace that destination with the destination of I3.
1342 This occurs in the common code where we compute both a quotient and
1343 remainder into a structure, in which case we want to do the computation
1344 directly into the structure to avoid register-register copies.
1345
1346 We make very conservative checks below and only try to handle the
1347 most common cases of this. For example, we only handle the case
1348 where I2 and I3 are adjacent to avoid making difficult register
1349 usage tests. */
1350
1351 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1352 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1353 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1354 && (! SMALL_REGISTER_CLASSES
1355 || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1356 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1357 || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
1358 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1359 && GET_CODE (PATTERN (i2)) == PARALLEL
1360 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1361 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1362 below would need to check what is inside (and reg_overlap_mentioned_p
1363 doesn't support those codes anyway). Don't allow those destinations;
1364 the resulting insn isn't likely to be recognized anyway. */
1365 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1366 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1367 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1368 SET_DEST (PATTERN (i3)))
1369 && next_real_insn (i2) == i3)
1370 {
1371 rtx p2 = PATTERN (i2);
1372
1373 /* Make sure that the destination of I3,
1374 which we are going to substitute into one output of I2,
1375 is not used within another output of I2. We must avoid making this:
1376 (parallel [(set (mem (reg 69)) ...)
1377 (set (reg 69) ...)])
1378 which is not well-defined as to order of actions.
1379 (Besides, reload can't handle output reloads for this.)
1380
1381 The problem can also happen if the dest of I3 is a memory ref,
1382 if another dest in I2 is an indirect memory ref. */
1383 for (i = 0; i < XVECLEN (p2, 0); i++)
1384 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1385 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1386 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1387 SET_DEST (XVECEXP (p2, 0, i))))
1388 break;
1389
1390 if (i == XVECLEN (p2, 0))
1391 for (i = 0; i < XVECLEN (p2, 0); i++)
1392 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1393 {
1394 combine_merges++;
1395
1396 subst_insn = i3;
1397 subst_low_cuid = INSN_CUID (i2);
1398
1399 added_sets_2 = added_sets_1 = 0;
1400 i2dest = SET_SRC (PATTERN (i3));
1401
1402 /* Replace the dest in I2 with our dest and make the resulting
1403 insn the new pattern for I3. Then skip to where we
1404 validate the pattern. Everything was set up above. */
1405 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1406 SET_DEST (PATTERN (i3)));
1407
1408 newpat = p2;
1409 i3_subst_into_i2 = 1;
1410 goto validate_replacement;
1411 }
1412 }
1413
1414 #ifndef HAVE_cc0
1415 /* If we have no I1 and I2 looks like:
1416 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1417 (set Y OP)])
1418 make up a dummy I1 that is
1419 (set Y OP)
1420 and change I2 to be
1421 (set (reg:CC X) (compare:CC Y (const_int 0)))
1422
1423 (We can ignore any trailing CLOBBERs.)
1424
1425 This undoes a previous combination and allows us to match a branch-and-
1426 decrement insn. */
1427
1428 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1429 && XVECLEN (PATTERN (i2), 0) >= 2
1430 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1431 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1432 == MODE_CC)
1433 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1434 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1435 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1436 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1437 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1438 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1439 {
1440 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1441 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1442 break;
1443
1444 if (i == 1)
1445 {
1446 /* We make I1 with the same INSN_UID as I2. This gives it
1447 the same INSN_CUID for value tracking. Our fake I1 will
1448 never appear in the insn stream so giving it the same INSN_UID
1449 as I2 will not cause a problem. */
1450
1451 subst_prev_insn = i1
1452 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1453 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX, NULL_RTX);
1454
1455 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1456 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1457 SET_DEST (PATTERN (i1)));
1458 }
1459 }
1460 #endif
1461
1462 /* Verify that I2 and I1 are valid for combining. */
1463 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1464 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1465 {
1466 undo_all ();
1467 return 0;
1468 }
1469
1470 /* Record whether I2DEST is used in I2SRC and similarly for the other
1471 cases. Knowing this will help in register status updating below. */
1472 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1473 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1474 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1475
1476 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1477 in I2SRC. */
1478 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1479
1480 /* Ensure that I3's pattern can be the destination of combines. */
1481 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1482 i1 && i2dest_in_i1src && i1_feeds_i3,
1483 &i3dest_killed))
1484 {
1485 undo_all ();
1486 return 0;
1487 }
1488
1489 /* See if any of the insns is a MULT operation. Unless one is, we will
1490 reject a combination that is, since it must be slower. Be conservative
1491 here. */
1492 if (GET_CODE (i2src) == MULT
1493 || (i1 != 0 && GET_CODE (i1src) == MULT)
1494 || (GET_CODE (PATTERN (i3)) == SET
1495 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1496 have_mult = 1;
1497
1498 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1499 We used to do this EXCEPT in one case: I3 has a post-inc in an
1500 output operand. However, that exception can give rise to insns like
1501 mov r3,(r3)+
1502 which is a famous insn on the PDP-11 where the value of r3 used as the
1503 source was model-dependent. Avoid this sort of thing. */
1504
1505 #if 0
1506 if (!(GET_CODE (PATTERN (i3)) == SET
1507 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1508 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1509 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1510 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1511 /* It's not the exception. */
1512 #endif
1513 #ifdef AUTO_INC_DEC
1514 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1515 if (REG_NOTE_KIND (link) == REG_INC
1516 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1517 || (i1 != 0
1518 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1519 {
1520 undo_all ();
1521 return 0;
1522 }
1523 #endif
1524
1525 /* See if the SETs in I1 or I2 need to be kept around in the merged
1526 instruction: whenever the value set there is still needed past I3.
1527 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1528
1529 For the SET in I1, we have two cases: If I1 and I2 independently
1530 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1531 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1532 in I1 needs to be kept around unless I1DEST dies or is set in either
1533 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1534 I1DEST. If so, we know I1 feeds into I2. */
1535
1536 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1537
1538 added_sets_1
1539 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1540 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1541
1542 /* If the set in I2 needs to be kept around, we must make a copy of
1543 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1544 PATTERN (I2), we are only substituting for the original I1DEST, not into
1545 an already-substituted copy. This also prevents making self-referential
1546 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1547 I2DEST. */
1548
1549 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1550 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1551 : PATTERN (i2));
1552
1553 if (added_sets_2)
1554 i2pat = copy_rtx (i2pat);
1555
1556 combine_merges++;
1557
1558 /* Substitute in the latest insn for the regs set by the earlier ones. */
1559
1560 maxreg = max_reg_num ();
1561
1562 subst_insn = i3;
1563
1564 /* It is possible that the source of I2 or I1 may be performing an
1565 unneeded operation, such as a ZERO_EXTEND of something that is known
1566 to have the high part zero. Handle that case by letting subst look at
1567 the innermost one of them.
1568
1569 Another way to do this would be to have a function that tries to
1570 simplify a single insn instead of merging two or more insns. We don't
1571 do this because of the potential of infinite loops and because
1572 of the potential extra memory required. However, doing it the way
1573 we are is a bit of a kludge and doesn't catch all cases.
1574
1575 But only do this if -fexpensive-optimizations since it slows things down
1576 and doesn't usually win. */
1577
1578 if (flag_expensive_optimizations)
1579 {
1580 /* Pass pc_rtx so no substitutions are done, just simplifications.
1581 The cases that we are interested in here do not involve the few
1582 cases were is_replaced is checked. */
1583 if (i1)
1584 {
1585 subst_low_cuid = INSN_CUID (i1);
1586 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1587 }
1588 else
1589 {
1590 subst_low_cuid = INSN_CUID (i2);
1591 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1592 }
1593
1594 undobuf.previous_undos = undobuf.undos;
1595 }
1596
1597 #ifndef HAVE_cc0
1598 /* Many machines that don't use CC0 have insns that can both perform an
1599 arithmetic operation and set the condition code. These operations will
1600 be represented as a PARALLEL with the first element of the vector
1601 being a COMPARE of an arithmetic operation with the constant zero.
1602 The second element of the vector will set some pseudo to the result
1603 of the same arithmetic operation. If we simplify the COMPARE, we won't
1604 match such a pattern and so will generate an extra insn. Here we test
1605 for this case, where both the comparison and the operation result are
1606 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1607 I2SRC. Later we will make the PARALLEL that contains I2. */
1608
1609 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1610 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1611 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1612 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1613 {
1614 rtx *cc_use;
1615 enum machine_mode compare_mode;
1616
1617 newpat = PATTERN (i3);
1618 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1619
1620 i2_is_used = 1;
1621
1622 #ifdef EXTRA_CC_MODES
1623 /* See if a COMPARE with the operand we substituted in should be done
1624 with the mode that is currently being used. If not, do the same
1625 processing we do in `subst' for a SET; namely, if the destination
1626 is used only once, try to replace it with a register of the proper
1627 mode and also replace the COMPARE. */
1628 if (undobuf.other_insn == 0
1629 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1630 &undobuf.other_insn))
1631 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1632 i2src, const0_rtx))
1633 != GET_MODE (SET_DEST (newpat))))
1634 {
1635 int regno = REGNO (SET_DEST (newpat));
1636 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1637
1638 if (regno < FIRST_PSEUDO_REGISTER
1639 || (REG_N_SETS (regno) == 1 && ! added_sets_2
1640 && ! REG_USERVAR_P (SET_DEST (newpat))))
1641 {
1642 if (regno >= FIRST_PSEUDO_REGISTER)
1643 SUBST (regno_reg_rtx[regno], new_dest);
1644
1645 SUBST (SET_DEST (newpat), new_dest);
1646 SUBST (XEXP (*cc_use, 0), new_dest);
1647 SUBST (SET_SRC (newpat),
1648 gen_rtx_combine (COMPARE, compare_mode,
1649 i2src, const0_rtx));
1650 }
1651 else
1652 undobuf.other_insn = 0;
1653 }
1654 #endif
1655 }
1656 else
1657 #endif
1658 {
1659 n_occurrences = 0; /* `subst' counts here */
1660
1661 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1662 need to make a unique copy of I2SRC each time we substitute it
1663 to avoid self-referential rtl. */
1664
1665 subst_low_cuid = INSN_CUID (i2);
1666 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1667 ! i1_feeds_i3 && i1dest_in_i1src);
1668 undobuf.previous_undos = undobuf.undos;
1669
1670 /* Record whether i2's body now appears within i3's body. */
1671 i2_is_used = n_occurrences;
1672 }
1673
1674 /* If we already got a failure, don't try to do more. Otherwise,
1675 try to substitute in I1 if we have it. */
1676
1677 if (i1 && GET_CODE (newpat) != CLOBBER)
1678 {
1679 /* Before we can do this substitution, we must redo the test done
1680 above (see detailed comments there) that ensures that I1DEST
1681 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1682
1683 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1684 0, NULL_PTR))
1685 {
1686 undo_all ();
1687 return 0;
1688 }
1689
1690 n_occurrences = 0;
1691 subst_low_cuid = INSN_CUID (i1);
1692 newpat = subst (newpat, i1dest, i1src, 0, 0);
1693 undobuf.previous_undos = undobuf.undos;
1694 }
1695
1696 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1697 to count all the ways that I2SRC and I1SRC can be used. */
1698 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1699 && i2_is_used + added_sets_2 > 1)
1700 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1701 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1702 > 1))
1703 /* Fail if we tried to make a new register (we used to abort, but there's
1704 really no reason to). */
1705 || max_reg_num () != maxreg
1706 /* Fail if we couldn't do something and have a CLOBBER. */
1707 || GET_CODE (newpat) == CLOBBER
1708 /* Fail if this new pattern is a MULT and we didn't have one before
1709 at the outer level. */
1710 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1711 && ! have_mult))
1712 {
1713 undo_all ();
1714 return 0;
1715 }
1716
1717 /* If the actions of the earlier insns must be kept
1718 in addition to substituting them into the latest one,
1719 we must make a new PARALLEL for the latest insn
1720 to hold additional the SETs. */
1721
1722 if (added_sets_1 || added_sets_2)
1723 {
1724 combine_extras++;
1725
1726 if (GET_CODE (newpat) == PARALLEL)
1727 {
1728 rtvec old = XVEC (newpat, 0);
1729 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1730 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1731 bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
1732 sizeof (old->elem[0]) * old->num_elem);
1733 }
1734 else
1735 {
1736 rtx old = newpat;
1737 total_sets = 1 + added_sets_1 + added_sets_2;
1738 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1739 XVECEXP (newpat, 0, 0) = old;
1740 }
1741
1742 if (added_sets_1)
1743 XVECEXP (newpat, 0, --total_sets)
1744 = (GET_CODE (PATTERN (i1)) == PARALLEL
1745 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1746
1747 if (added_sets_2)
1748 {
1749 /* If there is no I1, use I2's body as is. We used to also not do
1750 the subst call below if I2 was substituted into I3,
1751 but that could lose a simplification. */
1752 if (i1 == 0)
1753 XVECEXP (newpat, 0, --total_sets) = i2pat;
1754 else
1755 /* See comment where i2pat is assigned. */
1756 XVECEXP (newpat, 0, --total_sets)
1757 = subst (i2pat, i1dest, i1src, 0, 0);
1758 }
1759 }
1760
1761 /* We come here when we are replacing a destination in I2 with the
1762 destination of I3. */
1763 validate_replacement:
1764
1765 /* Note which hard regs this insn has as inputs. */
1766 mark_used_regs_combine (newpat);
1767
1768 /* Is the result of combination a valid instruction? */
1769 insn_code_number
1770 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1771
1772 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1773 the second SET's destination is a register that is unused. In that case,
1774 we just need the first SET. This can occur when simplifying a divmod
1775 insn. We *must* test for this case here because the code below that
1776 splits two independent SETs doesn't handle this case correctly when it
1777 updates the register status. Also check the case where the first
1778 SET's destination is unused. That would not cause incorrect code, but
1779 does cause an unneeded insn to remain. */
1780
1781 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1782 && XVECLEN (newpat, 0) == 2
1783 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1784 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1785 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1786 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1787 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1788 && asm_noperands (newpat) < 0)
1789 {
1790 newpat = XVECEXP (newpat, 0, 0);
1791 insn_code_number
1792 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1793 }
1794
1795 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1796 && XVECLEN (newpat, 0) == 2
1797 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1798 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1799 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1800 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1801 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1802 && asm_noperands (newpat) < 0)
1803 {
1804 newpat = XVECEXP (newpat, 0, 1);
1805 insn_code_number
1806 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1807 }
1808
1809 /* If we were combining three insns and the result is a simple SET
1810 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1811 insns. There are two ways to do this. It can be split using a
1812 machine-specific method (like when you have an addition of a large
1813 constant) or by combine in the function find_split_point. */
1814
1815 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1816 && asm_noperands (newpat) < 0)
1817 {
1818 rtx m_split, *split;
1819 rtx ni2dest = i2dest;
1820
1821 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1822 use I2DEST as a scratch register will help. In the latter case,
1823 convert I2DEST to the mode of the source of NEWPAT if we can. */
1824
1825 m_split = split_insns (newpat, i3);
1826
1827 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1828 inputs of NEWPAT. */
1829
1830 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1831 possible to try that as a scratch reg. This would require adding
1832 more code to make it work though. */
1833
1834 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1835 {
1836 /* If I2DEST is a hard register or the only use of a pseudo,
1837 we can change its mode. */
1838 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1839 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1840 && GET_CODE (i2dest) == REG
1841 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1842 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
1843 && ! REG_USERVAR_P (i2dest))))
1844 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1845 REGNO (i2dest));
1846
1847 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1848 gen_rtvec (2, newpat,
1849 gen_rtx (CLOBBER,
1850 VOIDmode,
1851 ni2dest))),
1852 i3);
1853 }
1854
1855 if (m_split && GET_CODE (m_split) == SEQUENCE
1856 && XVECLEN (m_split, 0) == 2
1857 && (next_real_insn (i2) == i3
1858 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1859 INSN_CUID (i2))))
1860 {
1861 rtx i2set, i3set;
1862 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1863 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1864
1865 i3set = single_set (XVECEXP (m_split, 0, 1));
1866 i2set = single_set (XVECEXP (m_split, 0, 0));
1867
1868 /* In case we changed the mode of I2DEST, replace it in the
1869 pseudo-register table here. We can't do it above in case this
1870 code doesn't get executed and we do a split the other way. */
1871
1872 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1873 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1874
1875 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
1876 &i2_scratches);
1877
1878 /* If I2 or I3 has multiple SETs, we won't know how to track
1879 register status, so don't use these insns. If I2's destination
1880 is used between I2 and I3, we also can't use these insns. */
1881
1882 if (i2_code_number >= 0 && i2set && i3set
1883 && (next_real_insn (i2) == i3
1884 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
1885 insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
1886 &i3_scratches);
1887 if (insn_code_number >= 0)
1888 newpat = newi3pat;
1889
1890 /* It is possible that both insns now set the destination of I3.
1891 If so, we must show an extra use of it. */
1892
1893 if (insn_code_number >= 0)
1894 {
1895 rtx new_i3_dest = SET_DEST (i3set);
1896 rtx new_i2_dest = SET_DEST (i2set);
1897
1898 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
1899 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
1900 || GET_CODE (new_i3_dest) == SUBREG)
1901 new_i3_dest = XEXP (new_i3_dest, 0);
1902
1903 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
1904 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
1905 || GET_CODE (new_i2_dest) == SUBREG)
1906 new_i2_dest = XEXP (new_i2_dest, 0);
1907
1908 if (GET_CODE (new_i3_dest) == REG
1909 && GET_CODE (new_i2_dest) == REG
1910 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
1911 REG_N_SETS (REGNO (new_i2_dest))++;
1912 }
1913 }
1914
1915 /* If we can split it and use I2DEST, go ahead and see if that
1916 helps things be recognized. Verify that none of the registers
1917 are set between I2 and I3. */
1918 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1919 #ifdef HAVE_cc0
1920 && GET_CODE (i2dest) == REG
1921 #endif
1922 /* We need I2DEST in the proper mode. If it is a hard register
1923 or the only use of a pseudo, we can change its mode. */
1924 && (GET_MODE (*split) == GET_MODE (i2dest)
1925 || GET_MODE (*split) == VOIDmode
1926 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1927 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
1928 && ! REG_USERVAR_P (i2dest)))
1929 && (next_real_insn (i2) == i3
1930 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1931 /* We can't overwrite I2DEST if its value is still used by
1932 NEWPAT. */
1933 && ! reg_referenced_p (i2dest, newpat))
1934 {
1935 rtx newdest = i2dest;
1936 enum rtx_code split_code = GET_CODE (*split);
1937 enum machine_mode split_mode = GET_MODE (*split);
1938
1939 /* Get NEWDEST as a register in the proper mode. We have already
1940 validated that we can do this. */
1941 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
1942 {
1943 newdest = gen_rtx (REG, split_mode, REGNO (i2dest));
1944
1945 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1946 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1947 }
1948
1949 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1950 an ASHIFT. This can occur if it was inside a PLUS and hence
1951 appeared to be a memory address. This is a kludge. */
1952 if (split_code == MULT
1953 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1954 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1955 {
1956 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
1957 XEXP (*split, 0), GEN_INT (i)));
1958 /* Update split_code because we may not have a multiply
1959 anymore. */
1960 split_code = GET_CODE (*split);
1961 }
1962
1963 #ifdef INSN_SCHEDULING
1964 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1965 be written as a ZERO_EXTEND. */
1966 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
1967 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
1968 XEXP (*split, 0)));
1969 #endif
1970
1971 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1972 SUBST (*split, newdest);
1973 i2_code_number
1974 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
1975
1976 /* If the split point was a MULT and we didn't have one before,
1977 don't use one now. */
1978 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
1979 insn_code_number
1980 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1981 }
1982 }
1983
1984 /* Check for a case where we loaded from memory in a narrow mode and
1985 then sign extended it, but we need both registers. In that case,
1986 we have a PARALLEL with both loads from the same memory location.
1987 We can split this into a load from memory followed by a register-register
1988 copy. This saves at least one insn, more if register allocation can
1989 eliminate the copy.
1990
1991 We cannot do this if the destination of the second assignment is
1992 a register that we have already assumed is zero-extended. Similarly
1993 for a SUBREG of such a register. */
1994
1995 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1996 && GET_CODE (newpat) == PARALLEL
1997 && XVECLEN (newpat, 0) == 2
1998 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1999 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2000 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2001 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2002 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2003 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2004 INSN_CUID (i2))
2005 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2006 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2007 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2008 (GET_CODE (temp) == REG
2009 && reg_nonzero_bits[REGNO (temp)] != 0
2010 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2011 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2012 && (reg_nonzero_bits[REGNO (temp)]
2013 != GET_MODE_MASK (word_mode))))
2014 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2015 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2016 (GET_CODE (temp) == REG
2017 && reg_nonzero_bits[REGNO (temp)] != 0
2018 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2019 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2020 && (reg_nonzero_bits[REGNO (temp)]
2021 != GET_MODE_MASK (word_mode)))))
2022 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2023 SET_SRC (XVECEXP (newpat, 0, 1)))
2024 && ! find_reg_note (i3, REG_UNUSED,
2025 SET_DEST (XVECEXP (newpat, 0, 0))))
2026 {
2027 rtx ni2dest;
2028
2029 newi2pat = XVECEXP (newpat, 0, 0);
2030 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2031 newpat = XVECEXP (newpat, 0, 1);
2032 SUBST (SET_SRC (newpat),
2033 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2034 i2_code_number
2035 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2036
2037 if (i2_code_number >= 0)
2038 insn_code_number
2039 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2040
2041 if (insn_code_number >= 0)
2042 {
2043 rtx insn;
2044 rtx link;
2045
2046 /* If we will be able to accept this, we have made a change to the
2047 destination of I3. This can invalidate a LOG_LINKS pointing
2048 to I3. No other part of combine.c makes such a transformation.
2049
2050 The new I3 will have a destination that was previously the
2051 destination of I1 or I2 and which was used in i2 or I3. Call
2052 distribute_links to make a LOG_LINK from the next use of
2053 that destination. */
2054
2055 PATTERN (i3) = newpat;
2056 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
2057
2058 /* I3 now uses what used to be its destination and which is
2059 now I2's destination. That means we need a LOG_LINK from
2060 I3 to I2. But we used to have one, so we still will.
2061
2062 However, some later insn might be using I2's dest and have
2063 a LOG_LINK pointing at I3. We must remove this link.
2064 The simplest way to remove the link is to point it at I1,
2065 which we know will be a NOTE. */
2066
2067 for (insn = NEXT_INSN (i3);
2068 insn && (this_basic_block == n_basic_blocks - 1
2069 || insn != basic_block_head[this_basic_block + 1]);
2070 insn = NEXT_INSN (insn))
2071 {
2072 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2073 && reg_referenced_p (ni2dest, PATTERN (insn)))
2074 {
2075 for (link = LOG_LINKS (insn); link;
2076 link = XEXP (link, 1))
2077 if (XEXP (link, 0) == i3)
2078 XEXP (link, 0) = i1;
2079
2080 break;
2081 }
2082 }
2083 }
2084 }
2085
2086 /* Similarly, check for a case where we have a PARALLEL of two independent
2087 SETs but we started with three insns. In this case, we can do the sets
2088 as two separate insns. This case occurs when some SET allows two
2089 other insns to combine, but the destination of that SET is still live. */
2090
2091 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2092 && GET_CODE (newpat) == PARALLEL
2093 && XVECLEN (newpat, 0) == 2
2094 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2095 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2096 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2097 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2098 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2099 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2100 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2101 INSN_CUID (i2))
2102 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2103 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2104 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2105 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2106 XVECEXP (newpat, 0, 0))
2107 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2108 XVECEXP (newpat, 0, 1)))
2109 {
2110 /* Normally, it doesn't matter which of the two is done first,
2111 but it does if one references cc0. In that case, it has to
2112 be first. */
2113 #ifdef HAVE_cc0
2114 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2115 {
2116 newi2pat = XVECEXP (newpat, 0, 0);
2117 newpat = XVECEXP (newpat, 0, 1);
2118 }
2119 else
2120 #endif
2121 {
2122 newi2pat = XVECEXP (newpat, 0, 1);
2123 newpat = XVECEXP (newpat, 0, 0);
2124 }
2125
2126 i2_code_number
2127 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2128
2129 if (i2_code_number >= 0)
2130 insn_code_number
2131 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2132 }
2133
2134 /* If it still isn't recognized, fail and change things back the way they
2135 were. */
2136 if ((insn_code_number < 0
2137 /* Is the result a reasonable ASM_OPERANDS? */
2138 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2139 {
2140 undo_all ();
2141 return 0;
2142 }
2143
2144 /* If we had to change another insn, make sure it is valid also. */
2145 if (undobuf.other_insn)
2146 {
2147 rtx other_pat = PATTERN (undobuf.other_insn);
2148 rtx new_other_notes;
2149 rtx note, next;
2150
2151 CLEAR_HARD_REG_SET (newpat_used_regs);
2152
2153 other_code_number
2154 = recog_for_combine (&other_pat, undobuf.other_insn,
2155 &new_other_notes, &other_scratches);
2156
2157 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2158 {
2159 undo_all ();
2160 return 0;
2161 }
2162
2163 PATTERN (undobuf.other_insn) = other_pat;
2164
2165 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2166 are still valid. Then add any non-duplicate notes added by
2167 recog_for_combine. */
2168 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2169 {
2170 next = XEXP (note, 1);
2171
2172 if (REG_NOTE_KIND (note) == REG_UNUSED
2173 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2174 {
2175 if (GET_CODE (XEXP (note, 0)) == REG)
2176 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2177
2178 remove_note (undobuf.other_insn, note);
2179 }
2180 }
2181
2182 for (note = new_other_notes; note; note = XEXP (note, 1))
2183 if (GET_CODE (XEXP (note, 0)) == REG)
2184 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2185
2186 distribute_notes (new_other_notes, undobuf.other_insn,
2187 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2188 }
2189
2190 /* We now know that we can do this combination. Merge the insns and
2191 update the status of registers and LOG_LINKS. */
2192
2193 {
2194 rtx i3notes, i2notes, i1notes = 0;
2195 rtx i3links, i2links, i1links = 0;
2196 rtx midnotes = 0;
2197 register int regno;
2198 /* Compute which registers we expect to eliminate. newi2pat may be setting
2199 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2200 same as i3dest, in which case newi2pat may be setting i1dest. */
2201 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2202 || i2dest_in_i2src || i2dest_in_i1src
2203 ? 0 : i2dest);
2204 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2205 || (newi2pat && reg_set_p (i1dest, newi2pat))
2206 ? 0 : i1dest);
2207
2208 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2209 clear them. */
2210 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2211 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2212 if (i1)
2213 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2214
2215 /* Ensure that we do not have something that should not be shared but
2216 occurs multiple times in the new insns. Check this by first
2217 resetting all the `used' flags and then copying anything is shared. */
2218
2219 reset_used_flags (i3notes);
2220 reset_used_flags (i2notes);
2221 reset_used_flags (i1notes);
2222 reset_used_flags (newpat);
2223 reset_used_flags (newi2pat);
2224 if (undobuf.other_insn)
2225 reset_used_flags (PATTERN (undobuf.other_insn));
2226
2227 i3notes = copy_rtx_if_shared (i3notes);
2228 i2notes = copy_rtx_if_shared (i2notes);
2229 i1notes = copy_rtx_if_shared (i1notes);
2230 newpat = copy_rtx_if_shared (newpat);
2231 newi2pat = copy_rtx_if_shared (newi2pat);
2232 if (undobuf.other_insn)
2233 reset_used_flags (PATTERN (undobuf.other_insn));
2234
2235 INSN_CODE (i3) = insn_code_number;
2236 PATTERN (i3) = newpat;
2237 if (undobuf.other_insn)
2238 INSN_CODE (undobuf.other_insn) = other_code_number;
2239
2240 /* We had one special case above where I2 had more than one set and
2241 we replaced a destination of one of those sets with the destination
2242 of I3. In that case, we have to update LOG_LINKS of insns later
2243 in this basic block. Note that this (expensive) case is rare.
2244
2245 Also, in this case, we must pretend that all REG_NOTEs for I2
2246 actually came from I3, so that REG_UNUSED notes from I2 will be
2247 properly handled. */
2248
2249 if (i3_subst_into_i2)
2250 {
2251 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2252 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2253 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2254 && ! find_reg_note (i2, REG_UNUSED,
2255 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2256 for (temp = NEXT_INSN (i2);
2257 temp && (this_basic_block == n_basic_blocks - 1
2258 || basic_block_head[this_basic_block] != temp);
2259 temp = NEXT_INSN (temp))
2260 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2261 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2262 if (XEXP (link, 0) == i2)
2263 XEXP (link, 0) = i3;
2264
2265 if (i3notes)
2266 {
2267 rtx link = i3notes;
2268 while (XEXP (link, 1))
2269 link = XEXP (link, 1);
2270 XEXP (link, 1) = i2notes;
2271 }
2272 else
2273 i3notes = i2notes;
2274 i2notes = 0;
2275 }
2276
2277 LOG_LINKS (i3) = 0;
2278 REG_NOTES (i3) = 0;
2279 LOG_LINKS (i2) = 0;
2280 REG_NOTES (i2) = 0;
2281
2282 if (newi2pat)
2283 {
2284 INSN_CODE (i2) = i2_code_number;
2285 PATTERN (i2) = newi2pat;
2286 }
2287 else
2288 {
2289 PUT_CODE (i2, NOTE);
2290 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2291 NOTE_SOURCE_FILE (i2) = 0;
2292 }
2293
2294 if (i1)
2295 {
2296 LOG_LINKS (i1) = 0;
2297 REG_NOTES (i1) = 0;
2298 PUT_CODE (i1, NOTE);
2299 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2300 NOTE_SOURCE_FILE (i1) = 0;
2301 }
2302
2303 /* Get death notes for everything that is now used in either I3 or
2304 I2 and used to die in a previous insn. If we built two new
2305 patterns, move from I1 to I2 then I2 to I3 so that we get the
2306 proper movement on registers that I2 modifies. */
2307
2308 if (newi2pat)
2309 {
2310 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2311 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2312 }
2313 else
2314 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2315 i3, &midnotes);
2316
2317 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2318 if (i3notes)
2319 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2320 elim_i2, elim_i1);
2321 if (i2notes)
2322 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2323 elim_i2, elim_i1);
2324 if (i1notes)
2325 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2326 elim_i2, elim_i1);
2327 if (midnotes)
2328 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2329 elim_i2, elim_i1);
2330
2331 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2332 know these are REG_UNUSED and want them to go to the desired insn,
2333 so we always pass it as i3. We have not counted the notes in
2334 reg_n_deaths yet, so we need to do so now. */
2335
2336 if (newi2pat && new_i2_notes)
2337 {
2338 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2339 if (GET_CODE (XEXP (temp, 0)) == REG)
2340 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2341
2342 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2343 }
2344
2345 if (new_i3_notes)
2346 {
2347 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2348 if (GET_CODE (XEXP (temp, 0)) == REG)
2349 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2350
2351 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2352 }
2353
2354 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2355 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2356 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2357 in that case, it might delete I2. Similarly for I2 and I1.
2358 Show an additional death due to the REG_DEAD note we make here. If
2359 we discard it in distribute_notes, we will decrement it again. */
2360
2361 if (i3dest_killed)
2362 {
2363 if (GET_CODE (i3dest_killed) == REG)
2364 REG_N_DEATHS (REGNO (i3dest_killed))++;
2365
2366 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2367 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2368 NULL_RTX),
2369 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
2370 else
2371 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2372 NULL_RTX),
2373 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2374 elim_i2, elim_i1);
2375 }
2376
2377 if (i2dest_in_i2src)
2378 {
2379 if (GET_CODE (i2dest) == REG)
2380 REG_N_DEATHS (REGNO (i2dest))++;
2381
2382 if (newi2pat && reg_set_p (i2dest, newi2pat))
2383 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2384 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2385 else
2386 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2387 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2388 NULL_RTX, NULL_RTX);
2389 }
2390
2391 if (i1dest_in_i1src)
2392 {
2393 if (GET_CODE (i1dest) == REG)
2394 REG_N_DEATHS (REGNO (i1dest))++;
2395
2396 if (newi2pat && reg_set_p (i1dest, newi2pat))
2397 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2398 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2399 else
2400 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2401 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2402 NULL_RTX, NULL_RTX);
2403 }
2404
2405 distribute_links (i3links);
2406 distribute_links (i2links);
2407 distribute_links (i1links);
2408
2409 if (GET_CODE (i2dest) == REG)
2410 {
2411 rtx link;
2412 rtx i2_insn = 0, i2_val = 0, set;
2413
2414 /* The insn that used to set this register doesn't exist, and
2415 this life of the register may not exist either. See if one of
2416 I3's links points to an insn that sets I2DEST. If it does,
2417 that is now the last known value for I2DEST. If we don't update
2418 this and I2 set the register to a value that depended on its old
2419 contents, we will get confused. If this insn is used, thing
2420 will be set correctly in combine_instructions. */
2421
2422 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2423 if ((set = single_set (XEXP (link, 0))) != 0
2424 && rtx_equal_p (i2dest, SET_DEST (set)))
2425 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2426
2427 record_value_for_reg (i2dest, i2_insn, i2_val);
2428
2429 /* If the reg formerly set in I2 died only once and that was in I3,
2430 zero its use count so it won't make `reload' do any work. */
2431 if (! added_sets_2
2432 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2433 && ! i2dest_in_i2src)
2434 {
2435 regno = REGNO (i2dest);
2436 REG_N_SETS (regno)--;
2437 if (REG_N_SETS (regno) == 0
2438 && ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2439 REG_N_REFS (regno) = 0;
2440 }
2441 }
2442
2443 if (i1 && GET_CODE (i1dest) == REG)
2444 {
2445 rtx link;
2446 rtx i1_insn = 0, i1_val = 0, set;
2447
2448 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2449 if ((set = single_set (XEXP (link, 0))) != 0
2450 && rtx_equal_p (i1dest, SET_DEST (set)))
2451 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2452
2453 record_value_for_reg (i1dest, i1_insn, i1_val);
2454
2455 regno = REGNO (i1dest);
2456 if (! added_sets_1 && ! i1dest_in_i1src)
2457 {
2458 REG_N_SETS (regno)--;
2459 if (REG_N_SETS (regno) == 0
2460 && ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2461 REG_N_REFS (regno) = 0;
2462 }
2463 }
2464
2465 /* Update reg_nonzero_bits et al for any changes that may have been made
2466 to this insn. */
2467
2468 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2469 if (newi2pat)
2470 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2471
2472 /* If we added any (clobber (scratch)), add them to the max for a
2473 block. This is a very pessimistic calculation, since we might
2474 have had them already and this might not be the worst block, but
2475 it's not worth doing any better. */
2476 max_scratch += i3_scratches + i2_scratches + other_scratches;
2477
2478 /* If I3 is now an unconditional jump, ensure that it has a
2479 BARRIER following it since it may have initially been a
2480 conditional jump. It may also be the last nonnote insn. */
2481
2482 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2483 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2484 || GET_CODE (temp) != BARRIER))
2485 emit_barrier_after (i3);
2486 }
2487
2488 combine_successes++;
2489
2490 /* Clear this here, so that subsequent get_last_value calls are not
2491 affected. */
2492 subst_prev_insn = NULL_RTX;
2493
2494 if (added_links_insn
2495 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2496 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2497 return added_links_insn;
2498 else
2499 return newi2pat ? i2 : i3;
2500 }
2501 \f
2502 /* Undo all the modifications recorded in undobuf. */
2503
2504 static void
2505 undo_all ()
2506 {
2507 struct undo *undo, *next;
2508
2509 for (undo = undobuf.undos; undo; undo = next)
2510 {
2511 next = undo->next;
2512 if (undo->is_int)
2513 *undo->where.i = undo->old_contents.i;
2514 else
2515 *undo->where.r = undo->old_contents.r;
2516
2517 undo->next = undobuf.frees;
2518 undobuf.frees = undo;
2519 }
2520
2521 obfree (undobuf.storage);
2522 undobuf.undos = undobuf.previous_undos = 0;
2523
2524 /* Clear this here, so that subsequent get_last_value calls are not
2525 affected. */
2526 subst_prev_insn = NULL_RTX;
2527 }
2528 \f
2529 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2530 where we have an arithmetic expression and return that point. LOC will
2531 be inside INSN.
2532
2533 try_combine will call this function to see if an insn can be split into
2534 two insns. */
2535
2536 static rtx *
2537 find_split_point (loc, insn)
2538 rtx *loc;
2539 rtx insn;
2540 {
2541 rtx x = *loc;
2542 enum rtx_code code = GET_CODE (x);
2543 rtx *split;
2544 int len = 0, pos, unsignedp;
2545 rtx inner;
2546
2547 /* First special-case some codes. */
2548 switch (code)
2549 {
2550 case SUBREG:
2551 #ifdef INSN_SCHEDULING
2552 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2553 point. */
2554 if (GET_CODE (SUBREG_REG (x)) == MEM)
2555 return loc;
2556 #endif
2557 return find_split_point (&SUBREG_REG (x), insn);
2558
2559 case MEM:
2560 #ifdef HAVE_lo_sum
2561 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2562 using LO_SUM and HIGH. */
2563 if (GET_CODE (XEXP (x, 0)) == CONST
2564 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2565 {
2566 SUBST (XEXP (x, 0),
2567 gen_rtx_combine (LO_SUM, Pmode,
2568 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2569 XEXP (x, 0)));
2570 return &XEXP (XEXP (x, 0), 0);
2571 }
2572 #endif
2573
2574 /* If we have a PLUS whose second operand is a constant and the
2575 address is not valid, perhaps will can split it up using
2576 the machine-specific way to split large constants. We use
2577 the first pseudo-reg (one of the virtual regs) as a placeholder;
2578 it will not remain in the result. */
2579 if (GET_CODE (XEXP (x, 0)) == PLUS
2580 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2581 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2582 {
2583 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2584 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2585 subst_insn);
2586
2587 /* This should have produced two insns, each of which sets our
2588 placeholder. If the source of the second is a valid address,
2589 we can make put both sources together and make a split point
2590 in the middle. */
2591
2592 if (seq && XVECLEN (seq, 0) == 2
2593 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2594 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2595 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2596 && ! reg_mentioned_p (reg,
2597 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2598 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2599 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2600 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2601 && memory_address_p (GET_MODE (x),
2602 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2603 {
2604 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2605 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2606
2607 /* Replace the placeholder in SRC2 with SRC1. If we can
2608 find where in SRC2 it was placed, that can become our
2609 split point and we can replace this address with SRC2.
2610 Just try two obvious places. */
2611
2612 src2 = replace_rtx (src2, reg, src1);
2613 split = 0;
2614 if (XEXP (src2, 0) == src1)
2615 split = &XEXP (src2, 0);
2616 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2617 && XEXP (XEXP (src2, 0), 0) == src1)
2618 split = &XEXP (XEXP (src2, 0), 0);
2619
2620 if (split)
2621 {
2622 SUBST (XEXP (x, 0), src2);
2623 return split;
2624 }
2625 }
2626
2627 /* If that didn't work, perhaps the first operand is complex and
2628 needs to be computed separately, so make a split point there.
2629 This will occur on machines that just support REG + CONST
2630 and have a constant moved through some previous computation. */
2631
2632 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2633 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2634 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2635 == 'o')))
2636 return &XEXP (XEXP (x, 0), 0);
2637 }
2638 break;
2639
2640 case SET:
2641 #ifdef HAVE_cc0
2642 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2643 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2644 we need to put the operand into a register. So split at that
2645 point. */
2646
2647 if (SET_DEST (x) == cc0_rtx
2648 && GET_CODE (SET_SRC (x)) != COMPARE
2649 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2650 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2651 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2652 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2653 return &SET_SRC (x);
2654 #endif
2655
2656 /* See if we can split SET_SRC as it stands. */
2657 split = find_split_point (&SET_SRC (x), insn);
2658 if (split && split != &SET_SRC (x))
2659 return split;
2660
2661 /* See if we can split SET_DEST as it stands. */
2662 split = find_split_point (&SET_DEST (x), insn);
2663 if (split && split != &SET_DEST (x))
2664 return split;
2665
2666 /* See if this is a bitfield assignment with everything constant. If
2667 so, this is an IOR of an AND, so split it into that. */
2668 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2669 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2670 <= HOST_BITS_PER_WIDE_INT)
2671 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2672 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2673 && GET_CODE (SET_SRC (x)) == CONST_INT
2674 && ((INTVAL (XEXP (SET_DEST (x), 1))
2675 + INTVAL (XEXP (SET_DEST (x), 2)))
2676 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2677 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2678 {
2679 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2680 int len = INTVAL (XEXP (SET_DEST (x), 1));
2681 int src = INTVAL (SET_SRC (x));
2682 rtx dest = XEXP (SET_DEST (x), 0);
2683 enum machine_mode mode = GET_MODE (dest);
2684 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2685
2686 if (BITS_BIG_ENDIAN)
2687 pos = GET_MODE_BITSIZE (mode) - len - pos;
2688
2689 if (src == mask)
2690 SUBST (SET_SRC (x),
2691 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2692 else
2693 SUBST (SET_SRC (x),
2694 gen_binary (IOR, mode,
2695 gen_binary (AND, mode, dest,
2696 GEN_INT (~ (mask << pos)
2697 & GET_MODE_MASK (mode))),
2698 GEN_INT (src << pos)));
2699
2700 SUBST (SET_DEST (x), dest);
2701
2702 split = find_split_point (&SET_SRC (x), insn);
2703 if (split && split != &SET_SRC (x))
2704 return split;
2705 }
2706
2707 /* Otherwise, see if this is an operation that we can split into two.
2708 If so, try to split that. */
2709 code = GET_CODE (SET_SRC (x));
2710
2711 switch (code)
2712 {
2713 case AND:
2714 /* If we are AND'ing with a large constant that is only a single
2715 bit and the result is only being used in a context where we
2716 need to know if it is zero or non-zero, replace it with a bit
2717 extraction. This will avoid the large constant, which might
2718 have taken more than one insn to make. If the constant were
2719 not a valid argument to the AND but took only one insn to make,
2720 this is no worse, but if it took more than one insn, it will
2721 be better. */
2722
2723 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2724 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2725 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2726 && GET_CODE (SET_DEST (x)) == REG
2727 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2728 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2729 && XEXP (*split, 0) == SET_DEST (x)
2730 && XEXP (*split, 1) == const0_rtx)
2731 {
2732 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
2733 XEXP (SET_SRC (x), 0),
2734 pos, NULL_RTX, 1, 1, 0, 0);
2735 if (extraction != 0)
2736 {
2737 SUBST (SET_SRC (x), extraction);
2738 return find_split_point (loc, insn);
2739 }
2740 }
2741 break;
2742
2743 case NE:
2744 /* if STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
2745 is known to be on, this can be converted into a NEG of a shift. */
2746 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
2747 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
2748 && 1 <= (pos = exact_log2
2749 (nonzero_bits (XEXP (SET_SRC (x), 0),
2750 GET_MODE (XEXP (SET_SRC (x), 0))))))
2751 {
2752 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
2753
2754 SUBST (SET_SRC (x),
2755 gen_rtx_combine (NEG, mode,
2756 gen_rtx_combine (LSHIFTRT, mode,
2757 XEXP (SET_SRC (x), 0),
2758 GEN_INT (pos))));
2759
2760 split = find_split_point (&SET_SRC (x), insn);
2761 if (split && split != &SET_SRC (x))
2762 return split;
2763 }
2764 break;
2765
2766 case SIGN_EXTEND:
2767 inner = XEXP (SET_SRC (x), 0);
2768
2769 /* We can't optimize if either mode is a partial integer
2770 mode as we don't know how many bits are significant
2771 in those modes. */
2772 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
2773 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
2774 break;
2775
2776 pos = 0;
2777 len = GET_MODE_BITSIZE (GET_MODE (inner));
2778 unsignedp = 0;
2779 break;
2780
2781 case SIGN_EXTRACT:
2782 case ZERO_EXTRACT:
2783 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2784 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2785 {
2786 inner = XEXP (SET_SRC (x), 0);
2787 len = INTVAL (XEXP (SET_SRC (x), 1));
2788 pos = INTVAL (XEXP (SET_SRC (x), 2));
2789
2790 if (BITS_BIG_ENDIAN)
2791 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2792 unsignedp = (code == ZERO_EXTRACT);
2793 }
2794 break;
2795
2796 default:
2797 break;
2798 }
2799
2800 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2801 {
2802 enum machine_mode mode = GET_MODE (SET_SRC (x));
2803
2804 /* For unsigned, we have a choice of a shift followed by an
2805 AND or two shifts. Use two shifts for field sizes where the
2806 constant might be too large. We assume here that we can
2807 always at least get 8-bit constants in an AND insn, which is
2808 true for every current RISC. */
2809
2810 if (unsignedp && len <= 8)
2811 {
2812 SUBST (SET_SRC (x),
2813 gen_rtx_combine
2814 (AND, mode,
2815 gen_rtx_combine (LSHIFTRT, mode,
2816 gen_lowpart_for_combine (mode, inner),
2817 GEN_INT (pos)),
2818 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2819
2820 split = find_split_point (&SET_SRC (x), insn);
2821 if (split && split != &SET_SRC (x))
2822 return split;
2823 }
2824 else
2825 {
2826 SUBST (SET_SRC (x),
2827 gen_rtx_combine
2828 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2829 gen_rtx_combine (ASHIFT, mode,
2830 gen_lowpart_for_combine (mode, inner),
2831 GEN_INT (GET_MODE_BITSIZE (mode)
2832 - len - pos)),
2833 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2834
2835 split = find_split_point (&SET_SRC (x), insn);
2836 if (split && split != &SET_SRC (x))
2837 return split;
2838 }
2839 }
2840
2841 /* See if this is a simple operation with a constant as the second
2842 operand. It might be that this constant is out of range and hence
2843 could be used as a split point. */
2844 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2845 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2846 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2847 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2848 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2849 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2850 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2851 == 'o'))))
2852 return &XEXP (SET_SRC (x), 1);
2853
2854 /* Finally, see if this is a simple operation with its first operand
2855 not in a register. The operation might require this operand in a
2856 register, so return it as a split point. We can always do this
2857 because if the first operand were another operation, we would have
2858 already found it as a split point. */
2859 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2860 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2861 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2862 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2863 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2864 return &XEXP (SET_SRC (x), 0);
2865
2866 return 0;
2867
2868 case AND:
2869 case IOR:
2870 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2871 it is better to write this as (not (ior A B)) so we can split it.
2872 Similarly for IOR. */
2873 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2874 {
2875 SUBST (*loc,
2876 gen_rtx_combine (NOT, GET_MODE (x),
2877 gen_rtx_combine (code == IOR ? AND : IOR,
2878 GET_MODE (x),
2879 XEXP (XEXP (x, 0), 0),
2880 XEXP (XEXP (x, 1), 0))));
2881 return find_split_point (loc, insn);
2882 }
2883
2884 /* Many RISC machines have a large set of logical insns. If the
2885 second operand is a NOT, put it first so we will try to split the
2886 other operand first. */
2887 if (GET_CODE (XEXP (x, 1)) == NOT)
2888 {
2889 rtx tem = XEXP (x, 0);
2890 SUBST (XEXP (x, 0), XEXP (x, 1));
2891 SUBST (XEXP (x, 1), tem);
2892 }
2893 break;
2894
2895 default:
2896 break;
2897 }
2898
2899 /* Otherwise, select our actions depending on our rtx class. */
2900 switch (GET_RTX_CLASS (code))
2901 {
2902 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2903 case '3':
2904 split = find_split_point (&XEXP (x, 2), insn);
2905 if (split)
2906 return split;
2907 /* ... fall through ... */
2908 case '2':
2909 case 'c':
2910 case '<':
2911 split = find_split_point (&XEXP (x, 1), insn);
2912 if (split)
2913 return split;
2914 /* ... fall through ... */
2915 case '1':
2916 /* Some machines have (and (shift ...) ...) insns. If X is not
2917 an AND, but XEXP (X, 0) is, use it as our split point. */
2918 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2919 return &XEXP (x, 0);
2920
2921 split = find_split_point (&XEXP (x, 0), insn);
2922 if (split)
2923 return split;
2924 return loc;
2925 }
2926
2927 /* Otherwise, we don't have a split point. */
2928 return 0;
2929 }
2930 \f
2931 /* Throughout X, replace FROM with TO, and return the result.
2932 The result is TO if X is FROM;
2933 otherwise the result is X, but its contents may have been modified.
2934 If they were modified, a record was made in undobuf so that
2935 undo_all will (among other things) return X to its original state.
2936
2937 If the number of changes necessary is too much to record to undo,
2938 the excess changes are not made, so the result is invalid.
2939 The changes already made can still be undone.
2940 undobuf.num_undo is incremented for such changes, so by testing that
2941 the caller can tell whether the result is valid.
2942
2943 `n_occurrences' is incremented each time FROM is replaced.
2944
2945 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2946
2947 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2948 by copying if `n_occurrences' is non-zero. */
2949
2950 static rtx
2951 subst (x, from, to, in_dest, unique_copy)
2952 register rtx x, from, to;
2953 int in_dest;
2954 int unique_copy;
2955 {
2956 register enum rtx_code code = GET_CODE (x);
2957 enum machine_mode op0_mode = VOIDmode;
2958 register char *fmt;
2959 register int len, i;
2960 rtx new;
2961
2962 /* Two expressions are equal if they are identical copies of a shared
2963 RTX or if they are both registers with the same register number
2964 and mode. */
2965
2966 #define COMBINE_RTX_EQUAL_P(X,Y) \
2967 ((X) == (Y) \
2968 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2969 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2970
2971 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2972 {
2973 n_occurrences++;
2974 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2975 }
2976
2977 /* If X and FROM are the same register but different modes, they will
2978 not have been seen as equal above. However, flow.c will make a
2979 LOG_LINKS entry for that case. If we do nothing, we will try to
2980 rerecognize our original insn and, when it succeeds, we will
2981 delete the feeding insn, which is incorrect.
2982
2983 So force this insn not to match in this (rare) case. */
2984 if (! in_dest && code == REG && GET_CODE (from) == REG
2985 && REGNO (x) == REGNO (from))
2986 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2987
2988 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2989 of which may contain things that can be combined. */
2990 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2991 return x;
2992
2993 /* It is possible to have a subexpression appear twice in the insn.
2994 Suppose that FROM is a register that appears within TO.
2995 Then, after that subexpression has been scanned once by `subst',
2996 the second time it is scanned, TO may be found. If we were
2997 to scan TO here, we would find FROM within it and create a
2998 self-referent rtl structure which is completely wrong. */
2999 if (COMBINE_RTX_EQUAL_P (x, to))
3000 return to;
3001
3002 len = GET_RTX_LENGTH (code);
3003 fmt = GET_RTX_FORMAT (code);
3004
3005 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
3006 set up to skip this common case. All other cases where we want to
3007 suppress replacing something inside a SET_SRC are handled via the
3008 IN_DEST operand. */
3009 if (code == SET
3010 && (GET_CODE (SET_DEST (x)) == REG
3011 || GET_CODE (SET_DEST (x)) == CC0
3012 || GET_CODE (SET_DEST (x)) == PC))
3013 fmt = "ie";
3014
3015 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3016 constant. */
3017 if (fmt[0] == 'e')
3018 op0_mode = GET_MODE (XEXP (x, 0));
3019
3020 for (i = 0; i < len; i++)
3021 {
3022 if (fmt[i] == 'E')
3023 {
3024 register int j;
3025 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3026 {
3027 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3028 {
3029 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3030 n_occurrences++;
3031 }
3032 else
3033 {
3034 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
3035
3036 /* If this substitution failed, this whole thing fails. */
3037 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3038 return new;
3039 }
3040
3041 SUBST (XVECEXP (x, i, j), new);
3042 }
3043 }
3044 else if (fmt[i] == 'e')
3045 {
3046 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3047 {
3048 /* In general, don't install a subreg involving two modes not
3049 tieable. It can worsen register allocation, and can even
3050 make invalid reload insns, since the reg inside may need to
3051 be copied from in the outside mode, and that may be invalid
3052 if it is an fp reg copied in integer mode.
3053
3054 We allow two exceptions to this: It is valid if it is inside
3055 another SUBREG and the mode of that SUBREG and the mode of
3056 the inside of TO is tieable and it is valid if X is a SET
3057 that copies FROM to CC0. */
3058 if (GET_CODE (to) == SUBREG
3059 && ! MODES_TIEABLE_P (GET_MODE (to),
3060 GET_MODE (SUBREG_REG (to)))
3061 && ! (code == SUBREG
3062 && MODES_TIEABLE_P (GET_MODE (x),
3063 GET_MODE (SUBREG_REG (to))))
3064 #ifdef HAVE_cc0
3065 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3066 #endif
3067 )
3068 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
3069
3070 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3071 n_occurrences++;
3072 }
3073 else
3074 /* If we are in a SET_DEST, suppress most cases unless we
3075 have gone inside a MEM, in which case we want to
3076 simplify the address. We assume here that things that
3077 are actually part of the destination have their inner
3078 parts in the first expression. This is true for SUBREG,
3079 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3080 things aside from REG and MEM that should appear in a
3081 SET_DEST. */
3082 new = subst (XEXP (x, i), from, to,
3083 (((in_dest
3084 && (code == SUBREG || code == STRICT_LOW_PART
3085 || code == ZERO_EXTRACT))
3086 || code == SET)
3087 && i == 0), unique_copy);
3088
3089 /* If we found that we will have to reject this combination,
3090 indicate that by returning the CLOBBER ourselves, rather than
3091 an expression containing it. This will speed things up as
3092 well as prevent accidents where two CLOBBERs are considered
3093 to be equal, thus producing an incorrect simplification. */
3094
3095 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3096 return new;
3097
3098 SUBST (XEXP (x, i), new);
3099 }
3100 }
3101
3102 /* Try to simplify X. If the simplification changed the code, it is likely
3103 that further simplification will help, so loop, but limit the number
3104 of repetitions that will be performed. */
3105
3106 for (i = 0; i < 4; i++)
3107 {
3108 /* If X is sufficiently simple, don't bother trying to do anything
3109 with it. */
3110 if (code != CONST_INT && code != REG && code != CLOBBER)
3111 x = simplify_rtx (x, op0_mode, i == 3, in_dest);
3112
3113 if (GET_CODE (x) == code)
3114 break;
3115
3116 code = GET_CODE (x);
3117
3118 /* We no longer know the original mode of operand 0 since we
3119 have changed the form of X) */
3120 op0_mode = VOIDmode;
3121 }
3122
3123 return x;
3124 }
3125 \f
3126 /* Simplify X, a piece of RTL. We just operate on the expression at the
3127 outer level; call `subst' to simplify recursively. Return the new
3128 expression.
3129
3130 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3131 will be the iteration even if an expression with a code different from
3132 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
3133
3134 static rtx
3135 simplify_rtx (x, op0_mode, last, in_dest)
3136 rtx x;
3137 enum machine_mode op0_mode;
3138 int last;
3139 int in_dest;
3140 {
3141 enum rtx_code code = GET_CODE (x);
3142 enum machine_mode mode = GET_MODE (x);
3143 rtx temp;
3144 int i;
3145
3146 /* If this is a commutative operation, put a constant last and a complex
3147 expression first. We don't need to do this for comparisons here. */
3148 if (GET_RTX_CLASS (code) == 'c'
3149 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3150 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3151 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3152 || (GET_CODE (XEXP (x, 0)) == SUBREG
3153 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3154 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3155 {
3156 temp = XEXP (x, 0);
3157 SUBST (XEXP (x, 0), XEXP (x, 1));
3158 SUBST (XEXP (x, 1), temp);
3159 }
3160
3161 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3162 sign extension of a PLUS with a constant, reverse the order of the sign
3163 extension and the addition. Note that this not the same as the original
3164 code, but overflow is undefined for signed values. Also note that the
3165 PLUS will have been partially moved "inside" the sign-extension, so that
3166 the first operand of X will really look like:
3167 (ashiftrt (plus (ashift A C4) C5) C4).
3168 We convert this to
3169 (plus (ashiftrt (ashift A C4) C2) C4)
3170 and replace the first operand of X with that expression. Later parts
3171 of this function may simplify the expression further.
3172
3173 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3174 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3175 distributive law to produce (plus (mult (sign_extend X) C1) C3).
3176
3177 We do this to simplify address expressions. */
3178
3179 if ((code == PLUS || code == MINUS || code == MULT)
3180 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3181 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3182 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3183 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3184 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3185 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3186 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3187 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3188 XEXP (XEXP (XEXP (x, 0), 0), 1),
3189 XEXP (XEXP (x, 0), 1))) != 0)
3190 {
3191 rtx new
3192 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3193 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3194 INTVAL (XEXP (XEXP (x, 0), 1)));
3195
3196 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3197 INTVAL (XEXP (XEXP (x, 0), 1)));
3198
3199 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3200 }
3201
3202 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3203 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3204 things. Check for cases where both arms are testing the same
3205 condition.
3206
3207 Don't do anything if all operands are very simple. */
3208
3209 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3210 || GET_RTX_CLASS (code) == '<')
3211 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3212 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3213 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3214 == 'o')))
3215 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3216 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3217 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3218 == 'o')))))
3219 || (GET_RTX_CLASS (code) == '1'
3220 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3221 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3222 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3223 == 'o'))))))
3224 {
3225 rtx cond, true, false;
3226
3227 cond = if_then_else_cond (x, &true, &false);
3228 if (cond != 0
3229 /* If everything is a comparison, what we have is highly unlikely
3230 to be simpler, so don't use it. */
3231 && ! (GET_RTX_CLASS (code) == '<'
3232 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3233 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
3234 {
3235 rtx cop1 = const0_rtx;
3236 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3237
3238 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3239 return x;
3240
3241 /* Simplify the alternative arms; this may collapse the true and
3242 false arms to store-flag values. */
3243 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3244 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3245
3246 /* Restarting if we generate a store-flag expression will cause
3247 us to loop. Just drop through in this case. */
3248
3249 /* If the result values are STORE_FLAG_VALUE and zero, we can
3250 just make the comparison operation. */
3251 if (true == const_true_rtx && false == const0_rtx)
3252 x = gen_binary (cond_code, mode, cond, cop1);
3253 else if (true == const0_rtx && false == const_true_rtx)
3254 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3255
3256 /* Likewise, we can make the negate of a comparison operation
3257 if the result values are - STORE_FLAG_VALUE and zero. */
3258 else if (GET_CODE (true) == CONST_INT
3259 && INTVAL (true) == - STORE_FLAG_VALUE
3260 && false == const0_rtx)
3261 x = gen_unary (NEG, mode, mode,
3262 gen_binary (cond_code, mode, cond, cop1));
3263 else if (GET_CODE (false) == CONST_INT
3264 && INTVAL (false) == - STORE_FLAG_VALUE
3265 && true == const0_rtx)
3266 x = gen_unary (NEG, mode, mode,
3267 gen_binary (reverse_condition (cond_code),
3268 mode, cond, cop1));
3269 else
3270 return gen_rtx (IF_THEN_ELSE, mode,
3271 gen_binary (cond_code, VOIDmode, cond, cop1),
3272 true, false);
3273
3274 code = GET_CODE (x);
3275 op0_mode = VOIDmode;
3276 }
3277 }
3278
3279 /* Try to fold this expression in case we have constants that weren't
3280 present before. */
3281 temp = 0;
3282 switch (GET_RTX_CLASS (code))
3283 {
3284 case '1':
3285 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3286 break;
3287 case '<':
3288 temp = simplify_relational_operation (code, op0_mode,
3289 XEXP (x, 0), XEXP (x, 1));
3290 #ifdef FLOAT_STORE_FLAG_VALUE
3291 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3292 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3293 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3294 #endif
3295 break;
3296 case 'c':
3297 case '2':
3298 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3299 break;
3300 case 'b':
3301 case '3':
3302 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3303 XEXP (x, 1), XEXP (x, 2));
3304 break;
3305 }
3306
3307 if (temp)
3308 x = temp, code = GET_CODE (temp);
3309
3310 /* First see if we can apply the inverse distributive law. */
3311 if (code == PLUS || code == MINUS
3312 || code == AND || code == IOR || code == XOR)
3313 {
3314 x = apply_distributive_law (x);
3315 code = GET_CODE (x);
3316 }
3317
3318 /* If CODE is an associative operation not otherwise handled, see if we
3319 can associate some operands. This can win if they are constants or
3320 if they are logically related (i.e. (a & b) & a. */
3321 if ((code == PLUS || code == MINUS
3322 || code == MULT || code == AND || code == IOR || code == XOR
3323 || code == DIV || code == UDIV
3324 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3325 && INTEGRAL_MODE_P (mode))
3326 {
3327 if (GET_CODE (XEXP (x, 0)) == code)
3328 {
3329 rtx other = XEXP (XEXP (x, 0), 0);
3330 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3331 rtx inner_op1 = XEXP (x, 1);
3332 rtx inner;
3333
3334 /* Make sure we pass the constant operand if any as the second
3335 one if this is a commutative operation. */
3336 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3337 {
3338 rtx tem = inner_op0;
3339 inner_op0 = inner_op1;
3340 inner_op1 = tem;
3341 }
3342 inner = simplify_binary_operation (code == MINUS ? PLUS
3343 : code == DIV ? MULT
3344 : code == UDIV ? MULT
3345 : code,
3346 mode, inner_op0, inner_op1);
3347
3348 /* For commutative operations, try the other pair if that one
3349 didn't simplify. */
3350 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3351 {
3352 other = XEXP (XEXP (x, 0), 1);
3353 inner = simplify_binary_operation (code, mode,
3354 XEXP (XEXP (x, 0), 0),
3355 XEXP (x, 1));
3356 }
3357
3358 if (inner)
3359 return gen_binary (code, mode, other, inner);
3360 }
3361 }
3362
3363 /* A little bit of algebraic simplification here. */
3364 switch (code)
3365 {
3366 case MEM:
3367 /* Ensure that our address has any ASHIFTs converted to MULT in case
3368 address-recognizing predicates are called later. */
3369 temp = make_compound_operation (XEXP (x, 0), MEM);
3370 SUBST (XEXP (x, 0), temp);
3371 break;
3372
3373 case SUBREG:
3374 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3375 is paradoxical. If we can't do that safely, then it becomes
3376 something nonsensical so that this combination won't take place. */
3377
3378 if (GET_CODE (SUBREG_REG (x)) == MEM
3379 && (GET_MODE_SIZE (mode)
3380 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3381 {
3382 rtx inner = SUBREG_REG (x);
3383 int endian_offset = 0;
3384 /* Don't change the mode of the MEM
3385 if that would change the meaning of the address. */
3386 if (MEM_VOLATILE_P (SUBREG_REG (x))
3387 || mode_dependent_address_p (XEXP (inner, 0)))
3388 return gen_rtx (CLOBBER, mode, const0_rtx);
3389
3390 if (BYTES_BIG_ENDIAN)
3391 {
3392 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3393 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3394 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3395 endian_offset -= (UNITS_PER_WORD
3396 - GET_MODE_SIZE (GET_MODE (inner)));
3397 }
3398 /* Note if the plus_constant doesn't make a valid address
3399 then this combination won't be accepted. */
3400 x = gen_rtx (MEM, mode,
3401 plus_constant (XEXP (inner, 0),
3402 (SUBREG_WORD (x) * UNITS_PER_WORD
3403 + endian_offset)));
3404 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3405 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3406 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3407 return x;
3408 }
3409
3410 /* If we are in a SET_DEST, these other cases can't apply. */
3411 if (in_dest)
3412 return x;
3413
3414 /* Changing mode twice with SUBREG => just change it once,
3415 or not at all if changing back to starting mode. */
3416 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3417 {
3418 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3419 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3420 return SUBREG_REG (SUBREG_REG (x));
3421
3422 SUBST_INT (SUBREG_WORD (x),
3423 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3424 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3425 }
3426
3427 /* SUBREG of a hard register => just change the register number
3428 and/or mode. If the hard register is not valid in that mode,
3429 suppress this combination. If the hard register is the stack,
3430 frame, or argument pointer, leave this as a SUBREG. */
3431
3432 if (GET_CODE (SUBREG_REG (x)) == REG
3433 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3434 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3435 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3436 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3437 #endif
3438 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3439 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3440 #endif
3441 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3442 {
3443 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3444 mode))
3445 return gen_rtx (REG, mode,
3446 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3447 else
3448 return gen_rtx (CLOBBER, mode, const0_rtx);
3449 }
3450
3451 /* For a constant, try to pick up the part we want. Handle a full
3452 word and low-order part. Only do this if we are narrowing
3453 the constant; if it is being widened, we have no idea what
3454 the extra bits will have been set to. */
3455
3456 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3457 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3458 && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
3459 && GET_MODE_CLASS (mode) == MODE_INT)
3460 {
3461 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3462 0, op0_mode);
3463 if (temp)
3464 return temp;
3465 }
3466
3467 /* If we want a subreg of a constant, at offset 0,
3468 take the low bits. On a little-endian machine, that's
3469 always valid. On a big-endian machine, it's valid
3470 only if the constant's mode fits in one word. Note that we
3471 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode. */
3472 if (CONSTANT_P (SUBREG_REG (x))
3473 && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3474 || ! WORDS_BIG_ENDIAN)
3475 ? SUBREG_WORD (x) == 0
3476 : (SUBREG_WORD (x)
3477 == ((GET_MODE_SIZE (op0_mode)
3478 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3479 / UNITS_PER_WORD)))
3480 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
3481 && (! WORDS_BIG_ENDIAN
3482 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3483 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3484
3485 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3486 since we are saying that the high bits don't matter. */
3487 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3488 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3489 return SUBREG_REG (x);
3490
3491 /* Note that we cannot do any narrowing for non-constants since
3492 we might have been counting on using the fact that some bits were
3493 zero. We now do this in the SET. */
3494
3495 break;
3496
3497 case NOT:
3498 /* (not (plus X -1)) can become (neg X). */
3499 if (GET_CODE (XEXP (x, 0)) == PLUS
3500 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3501 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3502
3503 /* Similarly, (not (neg X)) is (plus X -1). */
3504 if (GET_CODE (XEXP (x, 0)) == NEG)
3505 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3506 constm1_rtx);
3507
3508 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3509 if (GET_CODE (XEXP (x, 0)) == XOR
3510 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3511 && (temp = simplify_unary_operation (NOT, mode,
3512 XEXP (XEXP (x, 0), 1),
3513 mode)) != 0)
3514 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3515
3516 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3517 other than 1, but that is not valid. We could do a similar
3518 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3519 but this doesn't seem common enough to bother with. */
3520 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3521 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3522 return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
3523 XEXP (XEXP (x, 0), 1));
3524
3525 if (GET_CODE (XEXP (x, 0)) == SUBREG
3526 && subreg_lowpart_p (XEXP (x, 0))
3527 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3528 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3529 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3530 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3531 {
3532 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3533
3534 x = gen_rtx (ROTATE, inner_mode,
3535 gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
3536 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3537 return gen_lowpart_for_combine (mode, x);
3538 }
3539
3540 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3541 reversing the comparison code if valid. */
3542 if (STORE_FLAG_VALUE == -1
3543 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3544 && reversible_comparison_p (XEXP (x, 0)))
3545 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3546 mode, XEXP (XEXP (x, 0), 0),
3547 XEXP (XEXP (x, 0), 1));
3548
3549 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3550 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3551 perform the above simplification. */
3552
3553 if (STORE_FLAG_VALUE == -1
3554 && XEXP (x, 1) == const1_rtx
3555 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3556 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3557 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3558 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3559
3560 /* Apply De Morgan's laws to reduce number of patterns for machines
3561 with negating logical insns (and-not, nand, etc.). If result has
3562 only one NOT, put it first, since that is how the patterns are
3563 coded. */
3564
3565 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3566 {
3567 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3568
3569 if (GET_CODE (in1) == NOT)
3570 in1 = XEXP (in1, 0);
3571 else
3572 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3573
3574 if (GET_CODE (in2) == NOT)
3575 in2 = XEXP (in2, 0);
3576 else if (GET_CODE (in2) == CONST_INT
3577 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3578 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3579 else
3580 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3581
3582 if (GET_CODE (in2) == NOT)
3583 {
3584 rtx tem = in2;
3585 in2 = in1; in1 = tem;
3586 }
3587
3588 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3589 mode, in1, in2);
3590 }
3591 break;
3592
3593 case NEG:
3594 /* (neg (plus X 1)) can become (not X). */
3595 if (GET_CODE (XEXP (x, 0)) == PLUS
3596 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3597 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3598
3599 /* Similarly, (neg (not X)) is (plus X 1). */
3600 if (GET_CODE (XEXP (x, 0)) == NOT)
3601 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3602
3603 /* (neg (minus X Y)) can become (minus Y X). */
3604 if (GET_CODE (XEXP (x, 0)) == MINUS
3605 && (! FLOAT_MODE_P (mode)
3606 /* x-y != -(y-x) with IEEE floating point. */
3607 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3608 || flag_fast_math))
3609 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3610 XEXP (XEXP (x, 0), 0));
3611
3612 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3613 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3614 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3615 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3616
3617 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3618 if we can then eliminate the NEG (e.g.,
3619 if the operand is a constant). */
3620
3621 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3622 {
3623 temp = simplify_unary_operation (NEG, mode,
3624 XEXP (XEXP (x, 0), 0), mode);
3625 if (temp)
3626 {
3627 SUBST (XEXP (XEXP (x, 0), 0), temp);
3628 return XEXP (x, 0);
3629 }
3630 }
3631
3632 temp = expand_compound_operation (XEXP (x, 0));
3633
3634 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3635 replaced by (lshiftrt X C). This will convert
3636 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3637
3638 if (GET_CODE (temp) == ASHIFTRT
3639 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3640 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3641 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3642 INTVAL (XEXP (temp, 1)));
3643
3644 /* If X has only a single bit that might be nonzero, say, bit I, convert
3645 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3646 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3647 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3648 or a SUBREG of one since we'd be making the expression more
3649 complex if it was just a register. */
3650
3651 if (GET_CODE (temp) != REG
3652 && ! (GET_CODE (temp) == SUBREG
3653 && GET_CODE (SUBREG_REG (temp)) == REG)
3654 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3655 {
3656 rtx temp1 = simplify_shift_const
3657 (NULL_RTX, ASHIFTRT, mode,
3658 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3659 GET_MODE_BITSIZE (mode) - 1 - i),
3660 GET_MODE_BITSIZE (mode) - 1 - i);
3661
3662 /* If all we did was surround TEMP with the two shifts, we
3663 haven't improved anything, so don't use it. Otherwise,
3664 we are better off with TEMP1. */
3665 if (GET_CODE (temp1) != ASHIFTRT
3666 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3667 || XEXP (XEXP (temp1, 0), 0) != temp)
3668 return temp1;
3669 }
3670 break;
3671
3672 case TRUNCATE:
3673 /* We can't handle truncation to a partial integer mode here
3674 because we don't know the real bitsize of the partial
3675 integer mode. */
3676 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3677 break;
3678
3679 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3680 SUBST (XEXP (x, 0),
3681 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3682 GET_MODE_MASK (mode), NULL_RTX, 0));
3683
3684 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3685 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3686 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3687 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3688 return XEXP (XEXP (x, 0), 0);
3689
3690 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3691 (OP:SI foo:SI) if OP is NEG or ABS. */
3692 if ((GET_CODE (XEXP (x, 0)) == ABS
3693 || GET_CODE (XEXP (x, 0)) == NEG)
3694 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3695 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3696 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3697 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3698 XEXP (XEXP (XEXP (x, 0), 0), 0));
3699
3700 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3701 (truncate:SI x). */
3702 if (GET_CODE (XEXP (x, 0)) == SUBREG
3703 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3704 && subreg_lowpart_p (XEXP (x, 0)))
3705 return SUBREG_REG (XEXP (x, 0));
3706
3707 /* If we know that the value is already truncated, we can
3708 replace the TRUNCATE with a SUBREG. */
3709 if (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) <= HOST_BITS_PER_WIDE_INT
3710 && (nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3711 &~ GET_MODE_MASK (mode)) == 0)
3712 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3713
3714 /* A truncate of a comparison can be replaced with a subreg if
3715 STORE_FLAG_VALUE permits. This is like the previous test,
3716 but it works even if the comparison is done in a mode larger
3717 than HOST_BITS_PER_WIDE_INT. */
3718 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3719 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3720 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
3721 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3722
3723 /* Similarly, a truncate of a register whose value is a
3724 comparison can be replaced with a subreg if STORE_FLAG_VALUE
3725 permits. */
3726 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3727 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
3728 && (temp = get_last_value (XEXP (x, 0)))
3729 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
3730 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3731
3732 break;
3733
3734 case FLOAT_TRUNCATE:
3735 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3736 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3737 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3738 return XEXP (XEXP (x, 0), 0);
3739
3740 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3741 (OP:SF foo:SF) if OP is NEG or ABS. */
3742 if ((GET_CODE (XEXP (x, 0)) == ABS
3743 || GET_CODE (XEXP (x, 0)) == NEG)
3744 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3745 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3746 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3747 XEXP (XEXP (XEXP (x, 0), 0), 0));
3748
3749 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3750 is (float_truncate:SF x). */
3751 if (GET_CODE (XEXP (x, 0)) == SUBREG
3752 && subreg_lowpart_p (XEXP (x, 0))
3753 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3754 return SUBREG_REG (XEXP (x, 0));
3755 break;
3756
3757 #ifdef HAVE_cc0
3758 case COMPARE:
3759 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3760 using cc0, in which case we want to leave it as a COMPARE
3761 so we can distinguish it from a register-register-copy. */
3762 if (XEXP (x, 1) == const0_rtx)
3763 return XEXP (x, 0);
3764
3765 /* In IEEE floating point, x-0 is not the same as x. */
3766 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3767 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3768 || flag_fast_math)
3769 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3770 return XEXP (x, 0);
3771 break;
3772 #endif
3773
3774 case CONST:
3775 /* (const (const X)) can become (const X). Do it this way rather than
3776 returning the inner CONST since CONST can be shared with a
3777 REG_EQUAL note. */
3778 if (GET_CODE (XEXP (x, 0)) == CONST)
3779 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3780 break;
3781
3782 #ifdef HAVE_lo_sum
3783 case LO_SUM:
3784 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3785 can add in an offset. find_split_point will split this address up
3786 again if it doesn't match. */
3787 if (GET_CODE (XEXP (x, 0)) == HIGH
3788 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3789 return XEXP (x, 1);
3790 break;
3791 #endif
3792
3793 case PLUS:
3794 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3795 outermost. That's because that's the way indexed addresses are
3796 supposed to appear. This code used to check many more cases, but
3797 they are now checked elsewhere. */
3798 if (GET_CODE (XEXP (x, 0)) == PLUS
3799 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3800 return gen_binary (PLUS, mode,
3801 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3802 XEXP (x, 1)),
3803 XEXP (XEXP (x, 0), 1));
3804
3805 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3806 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3807 bit-field and can be replaced by either a sign_extend or a
3808 sign_extract. The `and' may be a zero_extend. */
3809 if (GET_CODE (XEXP (x, 0)) == XOR
3810 && GET_CODE (XEXP (x, 1)) == CONST_INT
3811 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3812 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3813 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3814 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3815 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3816 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3817 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3818 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3819 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3820 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3821 == i + 1))))
3822 return simplify_shift_const
3823 (NULL_RTX, ASHIFTRT, mode,
3824 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3825 XEXP (XEXP (XEXP (x, 0), 0), 0),
3826 GET_MODE_BITSIZE (mode) - (i + 1)),
3827 GET_MODE_BITSIZE (mode) - (i + 1));
3828
3829 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3830 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3831 is 1. This produces better code than the alternative immediately
3832 below. */
3833 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3834 && reversible_comparison_p (XEXP (x, 0))
3835 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3836 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3837 return
3838 gen_unary (NEG, mode, mode,
3839 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3840 mode, XEXP (XEXP (x, 0), 0),
3841 XEXP (XEXP (x, 0), 1)));
3842
3843 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3844 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3845 the bitsize of the mode - 1. This allows simplification of
3846 "a = (b & 8) == 0;" */
3847 if (XEXP (x, 1) == constm1_rtx
3848 && GET_CODE (XEXP (x, 0)) != REG
3849 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3850 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3851 && nonzero_bits (XEXP (x, 0), mode) == 1)
3852 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3853 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3854 gen_rtx_combine (XOR, mode,
3855 XEXP (x, 0), const1_rtx),
3856 GET_MODE_BITSIZE (mode) - 1),
3857 GET_MODE_BITSIZE (mode) - 1);
3858
3859 /* If we are adding two things that have no bits in common, convert
3860 the addition into an IOR. This will often be further simplified,
3861 for example in cases like ((a & 1) + (a & 2)), which can
3862 become a & 3. */
3863
3864 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3865 && (nonzero_bits (XEXP (x, 0), mode)
3866 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3867 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3868 break;
3869
3870 case MINUS:
3871 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
3872 by reversing the comparison code if valid. */
3873 if (STORE_FLAG_VALUE == 1
3874 && XEXP (x, 0) == const1_rtx
3875 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3876 && reversible_comparison_p (XEXP (x, 1)))
3877 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3878 mode, XEXP (XEXP (x, 1), 0),
3879 XEXP (XEXP (x, 1), 1));
3880
3881 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3882 (and <foo> (const_int pow2-1)) */
3883 if (GET_CODE (XEXP (x, 1)) == AND
3884 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3885 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3886 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3887 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3888 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3889
3890 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3891 integers. */
3892 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
3893 return gen_binary (MINUS, mode,
3894 gen_binary (MINUS, mode, XEXP (x, 0),
3895 XEXP (XEXP (x, 1), 0)),
3896 XEXP (XEXP (x, 1), 1));
3897 break;
3898
3899 case MULT:
3900 /* If we have (mult (plus A B) C), apply the distributive law and then
3901 the inverse distributive law to see if things simplify. This
3902 occurs mostly in addresses, often when unrolling loops. */
3903
3904 if (GET_CODE (XEXP (x, 0)) == PLUS)
3905 {
3906 x = apply_distributive_law
3907 (gen_binary (PLUS, mode,
3908 gen_binary (MULT, mode,
3909 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3910 gen_binary (MULT, mode,
3911 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3912
3913 if (GET_CODE (x) != MULT)
3914 return x;
3915 }
3916 break;
3917
3918 case UDIV:
3919 /* If this is a divide by a power of two, treat it as a shift if
3920 its first operand is a shift. */
3921 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3922 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3923 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3924 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3925 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3926 || GET_CODE (XEXP (x, 0)) == ROTATE
3927 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3928 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3929 break;
3930
3931 case EQ: case NE:
3932 case GT: case GTU: case GE: case GEU:
3933 case LT: case LTU: case LE: case LEU:
3934 /* If the first operand is a condition code, we can't do anything
3935 with it. */
3936 if (GET_CODE (XEXP (x, 0)) == COMPARE
3937 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3938 #ifdef HAVE_cc0
3939 && XEXP (x, 0) != cc0_rtx
3940 #endif
3941 ))
3942 {
3943 rtx op0 = XEXP (x, 0);
3944 rtx op1 = XEXP (x, 1);
3945 enum rtx_code new_code;
3946
3947 if (GET_CODE (op0) == COMPARE)
3948 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3949
3950 /* Simplify our comparison, if possible. */
3951 new_code = simplify_comparison (code, &op0, &op1);
3952
3953 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3954 if only the low-order bit is possibly nonzero in X (such as when
3955 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3956 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3957 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3958 (plus X 1).
3959
3960 Remove any ZERO_EXTRACT we made when thinking this was a
3961 comparison. It may now be simpler to use, e.g., an AND. If a
3962 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3963 the call to make_compound_operation in the SET case. */
3964
3965 if (STORE_FLAG_VALUE == 1
3966 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3967 && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
3968 return gen_lowpart_for_combine (mode,
3969 expand_compound_operation (op0));
3970
3971 else if (STORE_FLAG_VALUE == 1
3972 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3973 && op1 == const0_rtx
3974 && (num_sign_bit_copies (op0, mode)
3975 == GET_MODE_BITSIZE (mode)))
3976 {
3977 op0 = expand_compound_operation (op0);
3978 return gen_unary (NEG, mode, mode,
3979 gen_lowpart_for_combine (mode, op0));
3980 }
3981
3982 else if (STORE_FLAG_VALUE == 1
3983 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3984 && op1 == const0_rtx
3985 && nonzero_bits (op0, mode) == 1)
3986 {
3987 op0 = expand_compound_operation (op0);
3988 return gen_binary (XOR, mode,
3989 gen_lowpart_for_combine (mode, op0),
3990 const1_rtx);
3991 }
3992
3993 else if (STORE_FLAG_VALUE == 1
3994 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3995 && op1 == const0_rtx
3996 && (num_sign_bit_copies (op0, mode)
3997 == GET_MODE_BITSIZE (mode)))
3998 {
3999 op0 = expand_compound_operation (op0);
4000 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4001 }
4002
4003 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4004 those above. */
4005 if (STORE_FLAG_VALUE == -1
4006 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4007 && op1 == const0_rtx
4008 && (num_sign_bit_copies (op0, mode)
4009 == GET_MODE_BITSIZE (mode)))
4010 return gen_lowpart_for_combine (mode,
4011 expand_compound_operation (op0));
4012
4013 else if (STORE_FLAG_VALUE == -1
4014 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4015 && op1 == const0_rtx
4016 && nonzero_bits (op0, mode) == 1)
4017 {
4018 op0 = expand_compound_operation (op0);
4019 return gen_unary (NEG, mode, mode,
4020 gen_lowpart_for_combine (mode, op0));
4021 }
4022
4023 else if (STORE_FLAG_VALUE == -1
4024 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4025 && op1 == const0_rtx
4026 && (num_sign_bit_copies (op0, mode)
4027 == GET_MODE_BITSIZE (mode)))
4028 {
4029 op0 = expand_compound_operation (op0);
4030 return gen_unary (NOT, mode, mode,
4031 gen_lowpart_for_combine (mode, op0));
4032 }
4033
4034 /* If X is 0/1, (eq X 0) is X-1. */
4035 else if (STORE_FLAG_VALUE == -1
4036 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4037 && op1 == const0_rtx
4038 && nonzero_bits (op0, mode) == 1)
4039 {
4040 op0 = expand_compound_operation (op0);
4041 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4042 }
4043
4044 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4045 one bit that might be nonzero, we can convert (ne x 0) to
4046 (ashift x c) where C puts the bit in the sign bit. Remove any
4047 AND with STORE_FLAG_VALUE when we are done, since we are only
4048 going to test the sign bit. */
4049 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4050 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4051 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4052 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4053 && op1 == const0_rtx
4054 && mode == GET_MODE (op0)
4055 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4056 {
4057 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4058 expand_compound_operation (op0),
4059 GET_MODE_BITSIZE (mode) - 1 - i);
4060 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4061 return XEXP (x, 0);
4062 else
4063 return x;
4064 }
4065
4066 /* If the code changed, return a whole new comparison. */
4067 if (new_code != code)
4068 return gen_rtx_combine (new_code, mode, op0, op1);
4069
4070 /* Otherwise, keep this operation, but maybe change its operands.
4071 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4072 SUBST (XEXP (x, 0), op0);
4073 SUBST (XEXP (x, 1), op1);
4074 }
4075 break;
4076
4077 case IF_THEN_ELSE:
4078 return simplify_if_then_else (x);
4079
4080 case ZERO_EXTRACT:
4081 case SIGN_EXTRACT:
4082 case ZERO_EXTEND:
4083 case SIGN_EXTEND:
4084 /* If we are processing SET_DEST, we are done. */
4085 if (in_dest)
4086 return x;
4087
4088 return expand_compound_operation (x);
4089
4090 case SET:
4091 return simplify_set (x);
4092
4093 case AND:
4094 case IOR:
4095 case XOR:
4096 return simplify_logical (x, last);
4097
4098 case ABS:
4099 /* (abs (neg <foo>)) -> (abs <foo>) */
4100 if (GET_CODE (XEXP (x, 0)) == NEG)
4101 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4102
4103 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4104 do nothing. */
4105 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4106 break;
4107
4108 /* If operand is something known to be positive, ignore the ABS. */
4109 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4110 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4111 <= HOST_BITS_PER_WIDE_INT)
4112 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4113 & ((HOST_WIDE_INT) 1
4114 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4115 == 0)))
4116 return XEXP (x, 0);
4117
4118
4119 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4120 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4121 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
4122
4123 break;
4124
4125 case FFS:
4126 /* (ffs (*_extend <X>)) = (ffs <X>) */
4127 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4128 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4129 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4130 break;
4131
4132 case FLOAT:
4133 /* (float (sign_extend <X>)) = (float <X>). */
4134 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4135 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4136 break;
4137
4138 case ASHIFT:
4139 case LSHIFTRT:
4140 case ASHIFTRT:
4141 case ROTATE:
4142 case ROTATERT:
4143 /* If this is a shift by a constant amount, simplify it. */
4144 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4145 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4146 INTVAL (XEXP (x, 1)));
4147
4148 #ifdef SHIFT_COUNT_TRUNCATED
4149 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4150 SUBST (XEXP (x, 1),
4151 force_to_mode (XEXP (x, 1), GET_MODE (x),
4152 ((HOST_WIDE_INT) 1
4153 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4154 - 1,
4155 NULL_RTX, 0));
4156 #endif
4157
4158 break;
4159
4160 default:
4161 break;
4162 }
4163
4164 return x;
4165 }
4166 \f
4167 /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
4168
4169 static rtx
4170 simplify_if_then_else (x)
4171 rtx x;
4172 {
4173 enum machine_mode mode = GET_MODE (x);
4174 rtx cond = XEXP (x, 0);
4175 rtx true = XEXP (x, 1);
4176 rtx false = XEXP (x, 2);
4177 enum rtx_code true_code = GET_CODE (cond);
4178 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4179 rtx temp;
4180 int i;
4181
4182 /* Simplify storing of the truth value. */
4183 if (comparison_p && true == const_true_rtx && false == const0_rtx)
4184 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4185
4186 /* Also when the truth value has to be reversed. */
4187 if (comparison_p && reversible_comparison_p (cond)
4188 && true == const0_rtx && false == const_true_rtx)
4189 return gen_binary (reverse_condition (true_code),
4190 mode, XEXP (cond, 0), XEXP (cond, 1));
4191
4192 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4193 in it is being compared against certain values. Get the true and false
4194 comparisons and see if that says anything about the value of each arm. */
4195
4196 if (comparison_p && reversible_comparison_p (cond)
4197 && GET_CODE (XEXP (cond, 0)) == REG)
4198 {
4199 HOST_WIDE_INT nzb;
4200 rtx from = XEXP (cond, 0);
4201 enum rtx_code false_code = reverse_condition (true_code);
4202 rtx true_val = XEXP (cond, 1);
4203 rtx false_val = true_val;
4204 int swapped = 0;
4205
4206 /* If FALSE_CODE is EQ, swap the codes and arms. */
4207
4208 if (false_code == EQ)
4209 {
4210 swapped = 1, true_code = EQ, false_code = NE;
4211 temp = true, true = false, false = temp;
4212 }
4213
4214 /* If we are comparing against zero and the expression being tested has
4215 only a single bit that might be nonzero, that is its value when it is
4216 not equal to zero. Similarly if it is known to be -1 or 0. */
4217
4218 if (true_code == EQ && true_val == const0_rtx
4219 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4220 false_code = EQ, false_val = GEN_INT (nzb);
4221 else if (true_code == EQ && true_val == const0_rtx
4222 && (num_sign_bit_copies (from, GET_MODE (from))
4223 == GET_MODE_BITSIZE (GET_MODE (from))))
4224 false_code = EQ, false_val = constm1_rtx;
4225
4226 /* Now simplify an arm if we know the value of the register in the
4227 branch and it is used in the arm. Be careful due to the potential
4228 of locally-shared RTL. */
4229
4230 if (reg_mentioned_p (from, true))
4231 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
4232 pc_rtx, pc_rtx, 0, 0);
4233 if (reg_mentioned_p (from, false))
4234 false = subst (known_cond (copy_rtx (false), false_code,
4235 from, false_val),
4236 pc_rtx, pc_rtx, 0, 0);
4237
4238 SUBST (XEXP (x, 1), swapped ? false : true);
4239 SUBST (XEXP (x, 2), swapped ? true : false);
4240
4241 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4242 }
4243
4244 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4245 reversed, do so to avoid needing two sets of patterns for
4246 subtract-and-branch insns. Similarly if we have a constant in the true
4247 arm, the false arm is the same as the first operand of the comparison, or
4248 the false arm is more complicated than the true arm. */
4249
4250 if (comparison_p && reversible_comparison_p (cond)
4251 && (true == pc_rtx
4252 || (CONSTANT_P (true)
4253 && GET_CODE (false) != CONST_INT && false != pc_rtx)
4254 || true == const0_rtx
4255 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4256 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4257 || (GET_CODE (true) == SUBREG
4258 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4259 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4260 || reg_mentioned_p (true, false)
4261 || rtx_equal_p (false, XEXP (cond, 0))))
4262 {
4263 true_code = reverse_condition (true_code);
4264 SUBST (XEXP (x, 0),
4265 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4266 XEXP (cond, 1)));
4267
4268 SUBST (XEXP (x, 1), false);
4269 SUBST (XEXP (x, 2), true);
4270
4271 temp = true, true = false, false = temp, cond = XEXP (x, 0);
4272
4273 /* It is possible that the conditional has been simplified out. */
4274 true_code = GET_CODE (cond);
4275 comparison_p = GET_RTX_CLASS (true_code) == '<';
4276 }
4277
4278 /* If the two arms are identical, we don't need the comparison. */
4279
4280 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4281 return true;
4282
4283 /* Convert a == b ? b : a to "a". */
4284 if (true_code == EQ && ! side_effects_p (cond)
4285 && rtx_equal_p (XEXP (cond, 0), false)
4286 && rtx_equal_p (XEXP (cond, 1), true))
4287 return false;
4288 else if (true_code == NE && ! side_effects_p (cond)
4289 && rtx_equal_p (XEXP (cond, 0), true)
4290 && rtx_equal_p (XEXP (cond, 1), false))
4291 return true;
4292
4293 /* Look for cases where we have (abs x) or (neg (abs X)). */
4294
4295 if (GET_MODE_CLASS (mode) == MODE_INT
4296 && GET_CODE (false) == NEG
4297 && rtx_equal_p (true, XEXP (false, 0))
4298 && comparison_p
4299 && rtx_equal_p (true, XEXP (cond, 0))
4300 && ! side_effects_p (true))
4301 switch (true_code)
4302 {
4303 case GT:
4304 case GE:
4305 return gen_unary (ABS, mode, mode, true);
4306 case LT:
4307 case LE:
4308 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
4309 default:
4310 break;
4311 }
4312
4313 /* Look for MIN or MAX. */
4314
4315 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
4316 && comparison_p
4317 && rtx_equal_p (XEXP (cond, 0), true)
4318 && rtx_equal_p (XEXP (cond, 1), false)
4319 && ! side_effects_p (cond))
4320 switch (true_code)
4321 {
4322 case GE:
4323 case GT:
4324 return gen_binary (SMAX, mode, true, false);
4325 case LE:
4326 case LT:
4327 return gen_binary (SMIN, mode, true, false);
4328 case GEU:
4329 case GTU:
4330 return gen_binary (UMAX, mode, true, false);
4331 case LEU:
4332 case LTU:
4333 return gen_binary (UMIN, mode, true, false);
4334 default:
4335 break;
4336 }
4337
4338 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4339 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4340 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4341 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4342 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4343 neither 1 or -1, but it isn't worth checking for. */
4344
4345 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4346 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
4347 {
4348 rtx t = make_compound_operation (true, SET);
4349 rtx f = make_compound_operation (false, SET);
4350 rtx cond_op0 = XEXP (cond, 0);
4351 rtx cond_op1 = XEXP (cond, 1);
4352 enum rtx_code op, extend_op = NIL;
4353 enum machine_mode m = mode;
4354 rtx z = 0, c1;
4355
4356 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4357 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4358 || GET_CODE (t) == ASHIFT
4359 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4360 && rtx_equal_p (XEXP (t, 0), f))
4361 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4362
4363 /* If an identity-zero op is commutative, check whether there
4364 would be a match if we swapped the operands. */
4365 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4366 || GET_CODE (t) == XOR)
4367 && rtx_equal_p (XEXP (t, 1), f))
4368 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4369 else if (GET_CODE (t) == SIGN_EXTEND
4370 && (GET_CODE (XEXP (t, 0)) == PLUS
4371 || GET_CODE (XEXP (t, 0)) == MINUS
4372 || GET_CODE (XEXP (t, 0)) == IOR
4373 || GET_CODE (XEXP (t, 0)) == XOR
4374 || GET_CODE (XEXP (t, 0)) == ASHIFT
4375 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4376 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4377 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4378 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4379 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4380 && (num_sign_bit_copies (f, GET_MODE (f))
4381 > (GET_MODE_BITSIZE (mode)
4382 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4383 {
4384 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4385 extend_op = SIGN_EXTEND;
4386 m = GET_MODE (XEXP (t, 0));
4387 }
4388 else if (GET_CODE (t) == SIGN_EXTEND
4389 && (GET_CODE (XEXP (t, 0)) == PLUS
4390 || GET_CODE (XEXP (t, 0)) == IOR
4391 || GET_CODE (XEXP (t, 0)) == XOR)
4392 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4393 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4394 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4395 && (num_sign_bit_copies (f, GET_MODE (f))
4396 > (GET_MODE_BITSIZE (mode)
4397 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4398 {
4399 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4400 extend_op = SIGN_EXTEND;
4401 m = GET_MODE (XEXP (t, 0));
4402 }
4403 else if (GET_CODE (t) == ZERO_EXTEND
4404 && (GET_CODE (XEXP (t, 0)) == PLUS
4405 || GET_CODE (XEXP (t, 0)) == MINUS
4406 || GET_CODE (XEXP (t, 0)) == IOR
4407 || GET_CODE (XEXP (t, 0)) == XOR
4408 || GET_CODE (XEXP (t, 0)) == ASHIFT
4409 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4410 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4411 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4412 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4413 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4414 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4415 && ((nonzero_bits (f, GET_MODE (f))
4416 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4417 == 0))
4418 {
4419 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4420 extend_op = ZERO_EXTEND;
4421 m = GET_MODE (XEXP (t, 0));
4422 }
4423 else if (GET_CODE (t) == ZERO_EXTEND
4424 && (GET_CODE (XEXP (t, 0)) == PLUS
4425 || GET_CODE (XEXP (t, 0)) == IOR
4426 || GET_CODE (XEXP (t, 0)) == XOR)
4427 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4428 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4429 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4430 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4431 && ((nonzero_bits (f, GET_MODE (f))
4432 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4433 == 0))
4434 {
4435 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4436 extend_op = ZERO_EXTEND;
4437 m = GET_MODE (XEXP (t, 0));
4438 }
4439
4440 if (z)
4441 {
4442 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4443 pc_rtx, pc_rtx, 0, 0);
4444 temp = gen_binary (MULT, m, temp,
4445 gen_binary (MULT, m, c1, const_true_rtx));
4446 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4447 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4448
4449 if (extend_op != NIL)
4450 temp = gen_unary (extend_op, mode, m, temp);
4451
4452 return temp;
4453 }
4454 }
4455
4456 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4457 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4458 negation of a single bit, we can convert this operation to a shift. We
4459 can actually do this more generally, but it doesn't seem worth it. */
4460
4461 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4462 && false == const0_rtx && GET_CODE (true) == CONST_INT
4463 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4464 && (i = exact_log2 (INTVAL (true))) >= 0)
4465 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4466 == GET_MODE_BITSIZE (mode))
4467 && (i = exact_log2 (- INTVAL (true))) >= 0)))
4468 return
4469 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4470 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4471
4472 return x;
4473 }
4474 \f
4475 /* Simplify X, a SET expression. Return the new expression. */
4476
4477 static rtx
4478 simplify_set (x)
4479 rtx x;
4480 {
4481 rtx src = SET_SRC (x);
4482 rtx dest = SET_DEST (x);
4483 enum machine_mode mode
4484 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4485 rtx other_insn;
4486 rtx *cc_use;
4487
4488 /* (set (pc) (return)) gets written as (return). */
4489 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4490 return src;
4491
4492 /* Now that we know for sure which bits of SRC we are using, see if we can
4493 simplify the expression for the object knowing that we only need the
4494 low-order bits. */
4495
4496 if (GET_MODE_CLASS (mode) == MODE_INT)
4497 src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4498
4499 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4500 the comparison result and try to simplify it unless we already have used
4501 undobuf.other_insn. */
4502 if ((GET_CODE (src) == COMPARE
4503 #ifdef HAVE_cc0
4504 || dest == cc0_rtx
4505 #endif
4506 )
4507 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4508 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4509 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4510 && rtx_equal_p (XEXP (*cc_use, 0), dest))
4511 {
4512 enum rtx_code old_code = GET_CODE (*cc_use);
4513 enum rtx_code new_code;
4514 rtx op0, op1;
4515 int other_changed = 0;
4516 enum machine_mode compare_mode = GET_MODE (dest);
4517
4518 if (GET_CODE (src) == COMPARE)
4519 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4520 else
4521 op0 = src, op1 = const0_rtx;
4522
4523 /* Simplify our comparison, if possible. */
4524 new_code = simplify_comparison (old_code, &op0, &op1);
4525
4526 #ifdef EXTRA_CC_MODES
4527 /* If this machine has CC modes other than CCmode, check to see if we
4528 need to use a different CC mode here. */
4529 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4530 #endif /* EXTRA_CC_MODES */
4531
4532 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4533 /* If the mode changed, we have to change SET_DEST, the mode in the
4534 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4535 a hard register, just build new versions with the proper mode. If it
4536 is a pseudo, we lose unless it is only time we set the pseudo, in
4537 which case we can safely change its mode. */
4538 if (compare_mode != GET_MODE (dest))
4539 {
4540 int regno = REGNO (dest);
4541 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4542
4543 if (regno < FIRST_PSEUDO_REGISTER
4544 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
4545 {
4546 if (regno >= FIRST_PSEUDO_REGISTER)
4547 SUBST (regno_reg_rtx[regno], new_dest);
4548
4549 SUBST (SET_DEST (x), new_dest);
4550 SUBST (XEXP (*cc_use, 0), new_dest);
4551 other_changed = 1;
4552
4553 dest = new_dest;
4554 }
4555 }
4556 #endif
4557
4558 /* If the code changed, we have to build a new comparison in
4559 undobuf.other_insn. */
4560 if (new_code != old_code)
4561 {
4562 unsigned HOST_WIDE_INT mask;
4563
4564 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4565 dest, const0_rtx));
4566
4567 /* If the only change we made was to change an EQ into an NE or
4568 vice versa, OP0 has only one bit that might be nonzero, and OP1
4569 is zero, check if changing the user of the condition code will
4570 produce a valid insn. If it won't, we can keep the original code
4571 in that insn by surrounding our operation with an XOR. */
4572
4573 if (((old_code == NE && new_code == EQ)
4574 || (old_code == EQ && new_code == NE))
4575 && ! other_changed && op1 == const0_rtx
4576 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4577 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
4578 {
4579 rtx pat = PATTERN (other_insn), note = 0;
4580 int scratches;
4581
4582 if ((recog_for_combine (&pat, other_insn, &note, &scratches) < 0
4583 && ! check_asm_operands (pat)))
4584 {
4585 PUT_CODE (*cc_use, old_code);
4586 other_insn = 0;
4587
4588 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
4589 }
4590 }
4591
4592 other_changed = 1;
4593 }
4594
4595 if (other_changed)
4596 undobuf.other_insn = other_insn;
4597
4598 #ifdef HAVE_cc0
4599 /* If we are now comparing against zero, change our source if
4600 needed. If we do not use cc0, we always have a COMPARE. */
4601 if (op1 == const0_rtx && dest == cc0_rtx)
4602 {
4603 SUBST (SET_SRC (x), op0);
4604 src = op0;
4605 }
4606 else
4607 #endif
4608
4609 /* Otherwise, if we didn't previously have a COMPARE in the
4610 correct mode, we need one. */
4611 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4612 {
4613 SUBST (SET_SRC (x),
4614 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4615 src = SET_SRC (x);
4616 }
4617 else
4618 {
4619 /* Otherwise, update the COMPARE if needed. */
4620 SUBST (XEXP (src, 0), op0);
4621 SUBST (XEXP (src, 1), op1);
4622 }
4623 }
4624 else
4625 {
4626 /* Get SET_SRC in a form where we have placed back any
4627 compound expressions. Then do the checks below. */
4628 src = make_compound_operation (src, SET);
4629 SUBST (SET_SRC (x), src);
4630 }
4631
4632 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4633 and X being a REG or (subreg (reg)), we may be able to convert this to
4634 (set (subreg:m2 x) (op)).
4635
4636 We can always do this if M1 is narrower than M2 because that means that
4637 we only care about the low bits of the result.
4638
4639 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4640 perform a narrower operation that requested since the high-order bits will
4641 be undefined. On machine where it is defined, this transformation is safe
4642 as long as M1 and M2 have the same number of words. */
4643
4644 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4645 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4646 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4647 / UNITS_PER_WORD)
4648 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4649 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
4650 #ifndef WORD_REGISTER_OPERATIONS
4651 && (GET_MODE_SIZE (GET_MODE (src))
4652 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4653 #endif
4654 #ifdef CLASS_CANNOT_CHANGE_SIZE
4655 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
4656 && (TEST_HARD_REG_BIT
4657 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
4658 REGNO (dest)))
4659 && (GET_MODE_SIZE (GET_MODE (src))
4660 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4661 #endif
4662 && (GET_CODE (dest) == REG
4663 || (GET_CODE (dest) == SUBREG
4664 && GET_CODE (SUBREG_REG (dest)) == REG)))
4665 {
4666 SUBST (SET_DEST (x),
4667 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4668 dest));
4669 SUBST (SET_SRC (x), SUBREG_REG (src));
4670
4671 src = SET_SRC (x), dest = SET_DEST (x);
4672 }
4673
4674 #ifdef LOAD_EXTEND_OP
4675 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4676 would require a paradoxical subreg. Replace the subreg with a
4677 zero_extend to avoid the reload that would otherwise be required. */
4678
4679 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4680 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4681 && SUBREG_WORD (src) == 0
4682 && (GET_MODE_SIZE (GET_MODE (src))
4683 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4684 && GET_CODE (SUBREG_REG (src)) == MEM)
4685 {
4686 SUBST (SET_SRC (x),
4687 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4688 GET_MODE (src), XEXP (src, 0)));
4689
4690 src = SET_SRC (x);
4691 }
4692 #endif
4693
4694 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4695 are comparing an item known to be 0 or -1 against 0, use a logical
4696 operation instead. Check for one of the arms being an IOR of the other
4697 arm with some value. We compute three terms to be IOR'ed together. In
4698 practice, at most two will be nonzero. Then we do the IOR's. */
4699
4700 if (GET_CODE (dest) != PC
4701 && GET_CODE (src) == IF_THEN_ELSE
4702 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
4703 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4704 && XEXP (XEXP (src, 0), 1) == const0_rtx
4705 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
4706 #ifdef HAVE_conditional_move
4707 && ! can_conditionally_move_p (GET_MODE (src))
4708 #endif
4709 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4710 GET_MODE (XEXP (XEXP (src, 0), 0)))
4711 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4712 && ! side_effects_p (src))
4713 {
4714 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4715 ? XEXP (src, 1) : XEXP (src, 2));
4716 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4717 ? XEXP (src, 2) : XEXP (src, 1));
4718 rtx term1 = const0_rtx, term2, term3;
4719
4720 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4721 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4722 else if (GET_CODE (true) == IOR
4723 && rtx_equal_p (XEXP (true, 1), false))
4724 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4725 else if (GET_CODE (false) == IOR
4726 && rtx_equal_p (XEXP (false, 0), true))
4727 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4728 else if (GET_CODE (false) == IOR
4729 && rtx_equal_p (XEXP (false, 1), true))
4730 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4731
4732 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4733 term3 = gen_binary (AND, GET_MODE (src),
4734 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
4735 XEXP (XEXP (src, 0), 0)),
4736 false);
4737
4738 SUBST (SET_SRC (x),
4739 gen_binary (IOR, GET_MODE (src),
4740 gen_binary (IOR, GET_MODE (src), term1, term2),
4741 term3));
4742
4743 src = SET_SRC (x);
4744 }
4745
4746 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4747 whole thing fail. */
4748 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4749 return src;
4750 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4751 return dest;
4752 else
4753 /* Convert this into a field assignment operation, if possible. */
4754 return make_field_assignment (x);
4755 }
4756 \f
4757 /* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
4758 result. LAST is nonzero if this is the last retry. */
4759
4760 static rtx
4761 simplify_logical (x, last)
4762 rtx x;
4763 int last;
4764 {
4765 enum machine_mode mode = GET_MODE (x);
4766 rtx op0 = XEXP (x, 0);
4767 rtx op1 = XEXP (x, 1);
4768
4769 switch (GET_CODE (x))
4770 {
4771 case AND:
4772 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4773 insn (and may simplify more). */
4774 if (GET_CODE (op0) == XOR
4775 && rtx_equal_p (XEXP (op0, 0), op1)
4776 && ! side_effects_p (op1))
4777 x = gen_binary (AND, mode,
4778 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
4779
4780 if (GET_CODE (op0) == XOR
4781 && rtx_equal_p (XEXP (op0, 1), op1)
4782 && ! side_effects_p (op1))
4783 x = gen_binary (AND, mode,
4784 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
4785
4786 /* Similarly for (~ (A ^ B)) & A. */
4787 if (GET_CODE (op0) == NOT
4788 && GET_CODE (XEXP (op0, 0)) == XOR
4789 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4790 && ! side_effects_p (op1))
4791 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4792
4793 if (GET_CODE (op0) == NOT
4794 && GET_CODE (XEXP (op0, 0)) == XOR
4795 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4796 && ! side_effects_p (op1))
4797 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4798
4799 if (GET_CODE (op1) == CONST_INT)
4800 {
4801 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
4802
4803 /* If we have (ior (and (X C1) C2)) and the next restart would be
4804 the last, simplify this by making C1 as small as possible
4805 and then exit. */
4806 if (last
4807 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4808 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4809 && GET_CODE (op1) == CONST_INT)
4810 return gen_binary (IOR, mode,
4811 gen_binary (AND, mode, XEXP (op0, 0),
4812 GEN_INT (INTVAL (XEXP (op0, 1))
4813 & ~ INTVAL (op1))), op1);
4814
4815 if (GET_CODE (x) != AND)
4816 return x;
4817
4818 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
4819 || GET_RTX_CLASS (GET_CODE (x)) == '2')
4820 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4821 }
4822
4823 /* Convert (A | B) & A to A. */
4824 if (GET_CODE (op0) == IOR
4825 && (rtx_equal_p (XEXP (op0, 0), op1)
4826 || rtx_equal_p (XEXP (op0, 1), op1))
4827 && ! side_effects_p (XEXP (op0, 0))
4828 && ! side_effects_p (XEXP (op0, 1)))
4829 return op1;
4830
4831 /* In the following group of tests (and those in case IOR below),
4832 we start with some combination of logical operations and apply
4833 the distributive law followed by the inverse distributive law.
4834 Most of the time, this results in no change. However, if some of
4835 the operands are the same or inverses of each other, simplifications
4836 will result.
4837
4838 For example, (and (ior A B) (not B)) can occur as the result of
4839 expanding a bit field assignment. When we apply the distributive
4840 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
4841 which then simplifies to (and (A (not B))).
4842
4843 If we have (and (ior A B) C), apply the distributive law and then
4844 the inverse distributive law to see if things simplify. */
4845
4846 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
4847 {
4848 x = apply_distributive_law
4849 (gen_binary (GET_CODE (op0), mode,
4850 gen_binary (AND, mode, XEXP (op0, 0), op1),
4851 gen_binary (AND, mode, XEXP (op0, 1), op1)));
4852 if (GET_CODE (x) != AND)
4853 return x;
4854 }
4855
4856 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4857 return apply_distributive_law
4858 (gen_binary (GET_CODE (op1), mode,
4859 gen_binary (AND, mode, XEXP (op1, 0), op0),
4860 gen_binary (AND, mode, XEXP (op1, 1), op0)));
4861
4862 /* Similarly, taking advantage of the fact that
4863 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4864
4865 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4866 return apply_distributive_law
4867 (gen_binary (XOR, mode,
4868 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4869 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
4870
4871 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4872 return apply_distributive_law
4873 (gen_binary (XOR, mode,
4874 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4875 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
4876 break;
4877
4878 case IOR:
4879 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
4880 if (GET_CODE (op1) == CONST_INT
4881 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4882 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4883 return op1;
4884
4885 /* Convert (A & B) | A to A. */
4886 if (GET_CODE (op0) == AND
4887 && (rtx_equal_p (XEXP (op0, 0), op1)
4888 || rtx_equal_p (XEXP (op0, 1), op1))
4889 && ! side_effects_p (XEXP (op0, 0))
4890 && ! side_effects_p (XEXP (op0, 1)))
4891 return op1;
4892
4893 /* If we have (ior (and A B) C), apply the distributive law and then
4894 the inverse distributive law to see if things simplify. */
4895
4896 if (GET_CODE (op0) == AND)
4897 {
4898 x = apply_distributive_law
4899 (gen_binary (AND, mode,
4900 gen_binary (IOR, mode, XEXP (op0, 0), op1),
4901 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
4902
4903 if (GET_CODE (x) != IOR)
4904 return x;
4905 }
4906
4907 if (GET_CODE (op1) == AND)
4908 {
4909 x = apply_distributive_law
4910 (gen_binary (AND, mode,
4911 gen_binary (IOR, mode, XEXP (op1, 0), op0),
4912 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
4913
4914 if (GET_CODE (x) != IOR)
4915 return x;
4916 }
4917
4918 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4919 mode size to (rotate A CX). */
4920
4921 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
4922 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
4923 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
4924 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4925 && GET_CODE (XEXP (op1, 1)) == CONST_INT
4926 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
4927 == GET_MODE_BITSIZE (mode)))
4928 return gen_rtx (ROTATE, mode, XEXP (op0, 0),
4929 (GET_CODE (op0) == ASHIFT
4930 ? XEXP (op0, 1) : XEXP (op1, 1)));
4931
4932 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
4933 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
4934 does not affect any of the bits in OP1, it can really be done
4935 as a PLUS and we can associate. We do this by seeing if OP1
4936 can be safely shifted left C bits. */
4937 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
4938 && GET_CODE (XEXP (op0, 0)) == PLUS
4939 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
4940 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4941 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
4942 {
4943 int count = INTVAL (XEXP (op0, 1));
4944 HOST_WIDE_INT mask = INTVAL (op1) << count;
4945
4946 if (mask >> count == INTVAL (op1)
4947 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
4948 {
4949 SUBST (XEXP (XEXP (op0, 0), 1),
4950 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
4951 return op0;
4952 }
4953 }
4954 break;
4955
4956 case XOR:
4957 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4958 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4959 (NOT y). */
4960 {
4961 int num_negated = 0;
4962
4963 if (GET_CODE (op0) == NOT)
4964 num_negated++, op0 = XEXP (op0, 0);
4965 if (GET_CODE (op1) == NOT)
4966 num_negated++, op1 = XEXP (op1, 0);
4967
4968 if (num_negated == 2)
4969 {
4970 SUBST (XEXP (x, 0), op0);
4971 SUBST (XEXP (x, 1), op1);
4972 }
4973 else if (num_negated == 1)
4974 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
4975 }
4976
4977 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4978 correspond to a machine insn or result in further simplifications
4979 if B is a constant. */
4980
4981 if (GET_CODE (op0) == AND
4982 && rtx_equal_p (XEXP (op0, 1), op1)
4983 && ! side_effects_p (op1))
4984 return gen_binary (AND, mode,
4985 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
4986 op1);
4987
4988 else if (GET_CODE (op0) == AND
4989 && rtx_equal_p (XEXP (op0, 0), op1)
4990 && ! side_effects_p (op1))
4991 return gen_binary (AND, mode,
4992 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
4993 op1);
4994
4995 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4996 comparison if STORE_FLAG_VALUE is 1. */
4997 if (STORE_FLAG_VALUE == 1
4998 && op1 == const1_rtx
4999 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5000 && reversible_comparison_p (op0))
5001 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5002 mode, XEXP (op0, 0), XEXP (op0, 1));
5003
5004 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5005 is (lt foo (const_int 0)), so we can perform the above
5006 simplification if STORE_FLAG_VALUE is 1. */
5007
5008 if (STORE_FLAG_VALUE == 1
5009 && op1 == const1_rtx
5010 && GET_CODE (op0) == LSHIFTRT
5011 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5012 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5013 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
5014
5015 /* (xor (comparison foo bar) (const_int sign-bit))
5016 when STORE_FLAG_VALUE is the sign bit. */
5017 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5018 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5019 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5020 && op1 == const_true_rtx
5021 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5022 && reversible_comparison_p (op0))
5023 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5024 mode, XEXP (op0, 0), XEXP (op0, 1));
5025 break;
5026
5027 default:
5028 abort ();
5029 }
5030
5031 return x;
5032 }
5033 \f
5034 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5035 operations" because they can be replaced with two more basic operations.
5036 ZERO_EXTEND is also considered "compound" because it can be replaced with
5037 an AND operation, which is simpler, though only one operation.
5038
5039 The function expand_compound_operation is called with an rtx expression
5040 and will convert it to the appropriate shifts and AND operations,
5041 simplifying at each stage.
5042
5043 The function make_compound_operation is called to convert an expression
5044 consisting of shifts and ANDs into the equivalent compound expression.
5045 It is the inverse of this function, loosely speaking. */
5046
5047 static rtx
5048 expand_compound_operation (x)
5049 rtx x;
5050 {
5051 int pos = 0, len;
5052 int unsignedp = 0;
5053 int modewidth;
5054 rtx tem;
5055
5056 switch (GET_CODE (x))
5057 {
5058 case ZERO_EXTEND:
5059 unsignedp = 1;
5060 case SIGN_EXTEND:
5061 /* We can't necessarily use a const_int for a multiword mode;
5062 it depends on implicitly extending the value.
5063 Since we don't know the right way to extend it,
5064 we can't tell whether the implicit way is right.
5065
5066 Even for a mode that is no wider than a const_int,
5067 we can't win, because we need to sign extend one of its bits through
5068 the rest of it, and we don't know which bit. */
5069 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5070 return x;
5071
5072 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5073 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5074 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5075 reloaded. If not for that, MEM's would very rarely be safe.
5076
5077 Reject MODEs bigger than a word, because we might not be able
5078 to reference a two-register group starting with an arbitrary register
5079 (and currently gen_lowpart might crash for a SUBREG). */
5080
5081 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5082 return x;
5083
5084 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5085 /* If the inner object has VOIDmode (the only way this can happen
5086 is if it is a ASM_OPERANDS), we can't do anything since we don't
5087 know how much masking to do. */
5088 if (len == 0)
5089 return x;
5090
5091 break;
5092
5093 case ZERO_EXTRACT:
5094 unsignedp = 1;
5095 case SIGN_EXTRACT:
5096 /* If the operand is a CLOBBER, just return it. */
5097 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5098 return XEXP (x, 0);
5099
5100 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5101 || GET_CODE (XEXP (x, 2)) != CONST_INT
5102 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5103 return x;
5104
5105 len = INTVAL (XEXP (x, 1));
5106 pos = INTVAL (XEXP (x, 2));
5107
5108 /* If this goes outside the object being extracted, replace the object
5109 with a (use (mem ...)) construct that only combine understands
5110 and is used only for this purpose. */
5111 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5112 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
5113
5114 if (BITS_BIG_ENDIAN)
5115 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5116
5117 break;
5118
5119 default:
5120 return x;
5121 }
5122
5123 /* We can optimize some special cases of ZERO_EXTEND. */
5124 if (GET_CODE (x) == ZERO_EXTEND)
5125 {
5126 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5127 know that the last value didn't have any inappropriate bits
5128 set. */
5129 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5130 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5131 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5132 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5133 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5134 return XEXP (XEXP (x, 0), 0);
5135
5136 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5137 if (GET_CODE (XEXP (x, 0)) == SUBREG
5138 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5139 && subreg_lowpart_p (XEXP (x, 0))
5140 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5141 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5142 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))) == 0)
5143 return SUBREG_REG (XEXP (x, 0));
5144
5145 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5146 is a comparison and STORE_FLAG_VALUE permits. This is like
5147 the first case, but it works even when GET_MODE (x) is larger
5148 than HOST_WIDE_INT. */
5149 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5150 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5151 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5152 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5153 <= HOST_BITS_PER_WIDE_INT)
5154 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5155 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5156 return XEXP (XEXP (x, 0), 0);
5157
5158 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5159 if (GET_CODE (XEXP (x, 0)) == SUBREG
5160 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5161 && subreg_lowpart_p (XEXP (x, 0))
5162 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5163 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5164 <= HOST_BITS_PER_WIDE_INT)
5165 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5166 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5167 return SUBREG_REG (XEXP (x, 0));
5168
5169 /* If sign extension is cheaper than zero extension, then use it
5170 if we know that no extraneous bits are set, and that the high
5171 bit is not set. */
5172 if (flag_expensive_optimizations
5173 && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5174 && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
5175 & ~ (((unsigned HOST_WIDE_INT)
5176 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5177 >> 1))
5178 == 0))
5179 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5180 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5181 <= HOST_BITS_PER_WIDE_INT)
5182 && (((HOST_WIDE_INT) STORE_FLAG_VALUE
5183 & ~ (((unsigned HOST_WIDE_INT)
5184 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5185 >> 1))
5186 == 0))))
5187 {
5188 rtx temp = gen_rtx (SIGN_EXTEND, GET_MODE (x), XEXP (x, 0));
5189
5190 if (rtx_cost (temp, SET) < rtx_cost (x, SET))
5191 return expand_compound_operation (temp);
5192 }
5193 }
5194
5195 /* If we reach here, we want to return a pair of shifts. The inner
5196 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5197 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5198 logical depending on the value of UNSIGNEDP.
5199
5200 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5201 converted into an AND of a shift.
5202
5203 We must check for the case where the left shift would have a negative
5204 count. This can happen in a case like (x >> 31) & 255 on machines
5205 that can't shift by a constant. On those machines, we would first
5206 combine the shift with the AND to produce a variable-position
5207 extraction. Then the constant of 31 would be substituted in to produce
5208 a such a position. */
5209
5210 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5211 if (modewidth >= pos - len)
5212 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5213 GET_MODE (x),
5214 simplify_shift_const (NULL_RTX, ASHIFT,
5215 GET_MODE (x),
5216 XEXP (x, 0),
5217 modewidth - pos - len),
5218 modewidth - len);
5219
5220 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5221 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5222 simplify_shift_const (NULL_RTX, LSHIFTRT,
5223 GET_MODE (x),
5224 XEXP (x, 0), pos),
5225 ((HOST_WIDE_INT) 1 << len) - 1);
5226 else
5227 /* Any other cases we can't handle. */
5228 return x;
5229
5230
5231 /* If we couldn't do this for some reason, return the original
5232 expression. */
5233 if (GET_CODE (tem) == CLOBBER)
5234 return x;
5235
5236 return tem;
5237 }
5238 \f
5239 /* X is a SET which contains an assignment of one object into
5240 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5241 or certain SUBREGS). If possible, convert it into a series of
5242 logical operations.
5243
5244 We half-heartedly support variable positions, but do not at all
5245 support variable lengths. */
5246
5247 static rtx
5248 expand_field_assignment (x)
5249 rtx x;
5250 {
5251 rtx inner;
5252 rtx pos; /* Always counts from low bit. */
5253 int len;
5254 rtx mask;
5255 enum machine_mode compute_mode;
5256
5257 /* Loop until we find something we can't simplify. */
5258 while (1)
5259 {
5260 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5261 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5262 {
5263 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5264 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5265 pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
5266 }
5267 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5268 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5269 {
5270 inner = XEXP (SET_DEST (x), 0);
5271 len = INTVAL (XEXP (SET_DEST (x), 1));
5272 pos = XEXP (SET_DEST (x), 2);
5273
5274 /* If the position is constant and spans the width of INNER,
5275 surround INNER with a USE to indicate this. */
5276 if (GET_CODE (pos) == CONST_INT
5277 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5278 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
5279
5280 if (BITS_BIG_ENDIAN)
5281 {
5282 if (GET_CODE (pos) == CONST_INT)
5283 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5284 - INTVAL (pos));
5285 else if (GET_CODE (pos) == MINUS
5286 && GET_CODE (XEXP (pos, 1)) == CONST_INT
5287 && (INTVAL (XEXP (pos, 1))
5288 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5289 /* If position is ADJUST - X, new position is X. */
5290 pos = XEXP (pos, 0);
5291 else
5292 pos = gen_binary (MINUS, GET_MODE (pos),
5293 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5294 - len),
5295 pos);
5296 }
5297 }
5298
5299 /* A SUBREG between two modes that occupy the same numbers of words
5300 can be done by moving the SUBREG to the source. */
5301 else if (GET_CODE (SET_DEST (x)) == SUBREG
5302 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5303 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5304 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5305 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5306 {
5307 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
5308 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
5309 SET_SRC (x)));
5310 continue;
5311 }
5312 else
5313 break;
5314
5315 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5316 inner = SUBREG_REG (inner);
5317
5318 compute_mode = GET_MODE (inner);
5319
5320 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5321 if (len < HOST_BITS_PER_WIDE_INT)
5322 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5323 else
5324 break;
5325
5326 /* Now compute the equivalent expression. Make a copy of INNER
5327 for the SET_DEST in case it is a MEM into which we will substitute;
5328 we don't want shared RTL in that case. */
5329 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
5330 gen_binary (IOR, compute_mode,
5331 gen_binary (AND, compute_mode,
5332 gen_unary (NOT, compute_mode,
5333 compute_mode,
5334 gen_binary (ASHIFT,
5335 compute_mode,
5336 mask, pos)),
5337 inner),
5338 gen_binary (ASHIFT, compute_mode,
5339 gen_binary (AND, compute_mode,
5340 gen_lowpart_for_combine
5341 (compute_mode,
5342 SET_SRC (x)),
5343 mask),
5344 pos)));
5345 }
5346
5347 return x;
5348 }
5349 \f
5350 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5351 it is an RTX that represents a variable starting position; otherwise,
5352 POS is the (constant) starting bit position (counted from the LSB).
5353
5354 INNER may be a USE. This will occur when we started with a bitfield
5355 that went outside the boundary of the object in memory, which is
5356 allowed on most machines. To isolate this case, we produce a USE
5357 whose mode is wide enough and surround the MEM with it. The only
5358 code that understands the USE is this routine. If it is not removed,
5359 it will cause the resulting insn not to match.
5360
5361 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5362 signed reference.
5363
5364 IN_DEST is non-zero if this is a reference in the destination of a
5365 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5366 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5367 be used.
5368
5369 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5370 ZERO_EXTRACT should be built even for bits starting at bit 0.
5371
5372 MODE is the desired mode of the result (if IN_DEST == 0).
5373
5374 The result is an RTX for the extraction or NULL_RTX if the target
5375 can't handle it. */
5376
5377 static rtx
5378 make_extraction (mode, inner, pos, pos_rtx, len,
5379 unsignedp, in_dest, in_compare)
5380 enum machine_mode mode;
5381 rtx inner;
5382 int pos;
5383 rtx pos_rtx;
5384 int len;
5385 int unsignedp;
5386 int in_dest, in_compare;
5387 {
5388 /* This mode describes the size of the storage area
5389 to fetch the overall value from. Within that, we
5390 ignore the POS lowest bits, etc. */
5391 enum machine_mode is_mode = GET_MODE (inner);
5392 enum machine_mode inner_mode;
5393 enum machine_mode wanted_inner_mode = byte_mode;
5394 enum machine_mode wanted_inner_reg_mode = word_mode;
5395 enum machine_mode pos_mode = word_mode;
5396 enum machine_mode extraction_mode = word_mode;
5397 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5398 int spans_byte = 0;
5399 rtx new = 0;
5400 rtx orig_pos_rtx = pos_rtx;
5401 int orig_pos;
5402
5403 /* Get some information about INNER and get the innermost object. */
5404 if (GET_CODE (inner) == USE)
5405 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
5406 /* We don't need to adjust the position because we set up the USE
5407 to pretend that it was a full-word object. */
5408 spans_byte = 1, inner = XEXP (inner, 0);
5409 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5410 {
5411 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5412 consider just the QI as the memory to extract from.
5413 The subreg adds or removes high bits; its mode is
5414 irrelevant to the meaning of this extraction,
5415 since POS and LEN count from the lsb. */
5416 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5417 is_mode = GET_MODE (SUBREG_REG (inner));
5418 inner = SUBREG_REG (inner);
5419 }
5420
5421 inner_mode = GET_MODE (inner);
5422
5423 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
5424 pos = INTVAL (pos_rtx), pos_rtx = 0;
5425
5426 /* See if this can be done without an extraction. We never can if the
5427 width of the field is not the same as that of some integer mode. For
5428 registers, we can only avoid the extraction if the position is at the
5429 low-order bit and this is either not in the destination or we have the
5430 appropriate STRICT_LOW_PART operation available.
5431
5432 For MEM, we can avoid an extract if the field starts on an appropriate
5433 boundary and we can change the mode of the memory reference. However,
5434 we cannot directly access the MEM if we have a USE and the underlying
5435 MEM is not TMODE. This combination means that MEM was being used in a
5436 context where bits outside its mode were being referenced; that is only
5437 valid in bit-field insns. */
5438
5439 if (tmode != BLKmode
5440 && ! (spans_byte && inner_mode != tmode)
5441 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5442 && GET_CODE (inner) != MEM
5443 && (! in_dest
5444 || (GET_CODE (inner) == REG
5445 && (movstrict_optab->handlers[(int) tmode].insn_code
5446 != CODE_FOR_nothing))))
5447 || (GET_CODE (inner) == MEM && pos_rtx == 0
5448 && (pos
5449 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5450 : BITS_PER_UNIT)) == 0
5451 /* We can't do this if we are widening INNER_MODE (it
5452 may not be aligned, for one thing). */
5453 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5454 && (inner_mode == tmode
5455 || (! mode_dependent_address_p (XEXP (inner, 0))
5456 && ! MEM_VOLATILE_P (inner))))))
5457 {
5458 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5459 field. If the original and current mode are the same, we need not
5460 adjust the offset. Otherwise, we do if bytes big endian.
5461
5462 If INNER is not a MEM, get a piece consisting of just the field
5463 of interest (in this case POS % BITS_PER_WORD must be 0). */
5464
5465 if (GET_CODE (inner) == MEM)
5466 {
5467 int offset;
5468 /* POS counts from lsb, but make OFFSET count in memory order. */
5469 if (BYTES_BIG_ENDIAN)
5470 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5471 else
5472 offset = pos / BITS_PER_UNIT;
5473
5474 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5475 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5476 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5477 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5478 }
5479 else if (GET_CODE (inner) == REG)
5480 {
5481 /* We can't call gen_lowpart_for_combine here since we always want
5482 a SUBREG and it would sometimes return a new hard register. */
5483 if (tmode != inner_mode)
5484 new = gen_rtx (SUBREG, tmode, inner,
5485 (WORDS_BIG_ENDIAN
5486 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5487 ? (((GET_MODE_SIZE (inner_mode)
5488 - GET_MODE_SIZE (tmode))
5489 / UNITS_PER_WORD)
5490 - pos / BITS_PER_WORD)
5491 : pos / BITS_PER_WORD));
5492 else
5493 new = inner;
5494 }
5495 else
5496 new = force_to_mode (inner, tmode,
5497 len >= HOST_BITS_PER_WIDE_INT
5498 ? GET_MODE_MASK (tmode)
5499 : ((HOST_WIDE_INT) 1 << len) - 1,
5500 NULL_RTX, 0);
5501
5502 /* If this extraction is going into the destination of a SET,
5503 make a STRICT_LOW_PART unless we made a MEM. */
5504
5505 if (in_dest)
5506 return (GET_CODE (new) == MEM ? new
5507 : (GET_CODE (new) != SUBREG
5508 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5509 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
5510
5511 /* Otherwise, sign- or zero-extend unless we already are in the
5512 proper mode. */
5513
5514 return (mode == tmode ? new
5515 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5516 mode, new));
5517 }
5518
5519 /* Unless this is a COMPARE or we have a funny memory reference,
5520 don't do anything with zero-extending field extracts starting at
5521 the low-order bit since they are simple AND operations. */
5522 if (pos_rtx == 0 && pos == 0 && ! in_dest
5523 && ! in_compare && ! spans_byte && unsignedp)
5524 return 0;
5525
5526 /* Unless we are allowed to span bytes, reject this if we would be
5527 spanning bytes or if the position is not a constant and the length
5528 is not 1. In all other cases, we would only be going outside
5529 out object in cases when an original shift would have been
5530 undefined. */
5531 if (! spans_byte
5532 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5533 || (pos_rtx != 0 && len != 1)))
5534 return 0;
5535
5536 /* Get the mode to use should INNER not be a MEM, the mode for the position,
5537 and the mode for the result. */
5538 #ifdef HAVE_insv
5539 if (in_dest)
5540 {
5541 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5542 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5543 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5544 }
5545 #endif
5546
5547 #ifdef HAVE_extzv
5548 if (! in_dest && unsignedp)
5549 {
5550 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5551 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5552 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5553 }
5554 #endif
5555
5556 #ifdef HAVE_extv
5557 if (! in_dest && ! unsignedp)
5558 {
5559 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5560 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5561 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5562 }
5563 #endif
5564
5565 /* Never narrow an object, since that might not be safe. */
5566
5567 if (mode != VOIDmode
5568 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5569 extraction_mode = mode;
5570
5571 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5572 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5573 pos_mode = GET_MODE (pos_rtx);
5574
5575 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
5576 if we have to change the mode of memory and cannot, the desired mode is
5577 EXTRACTION_MODE. */
5578 if (GET_CODE (inner) != MEM)
5579 wanted_inner_mode = wanted_inner_reg_mode;
5580 else if (inner_mode != wanted_inner_mode
5581 && (mode_dependent_address_p (XEXP (inner, 0))
5582 || MEM_VOLATILE_P (inner)))
5583 wanted_inner_mode = extraction_mode;
5584
5585 orig_pos = pos;
5586
5587 if (BITS_BIG_ENDIAN)
5588 {
5589 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
5590 BITS_BIG_ENDIAN style. If position is constant, compute new
5591 position. Otherwise, build subtraction.
5592 Note that POS is relative to the mode of the original argument.
5593 If it's a MEM we need to recompute POS relative to that.
5594 However, if we're extracting from (or inserting into) a register,
5595 we want to recompute POS relative to wanted_inner_mode. */
5596 int width = (GET_CODE (inner) == MEM
5597 ? GET_MODE_BITSIZE (is_mode)
5598 : GET_MODE_BITSIZE (wanted_inner_mode));
5599
5600 if (pos_rtx == 0)
5601 pos = width - len - pos;
5602 else
5603 pos_rtx
5604 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5605 GEN_INT (width - len), pos_rtx);
5606 /* POS may be less than 0 now, but we check for that below.
5607 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
5608 }
5609
5610 /* If INNER has a wider mode, make it smaller. If this is a constant
5611 extract, try to adjust the byte to point to the byte containing
5612 the value. */
5613 if (wanted_inner_mode != VOIDmode
5614 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
5615 && ((GET_CODE (inner) == MEM
5616 && (inner_mode == wanted_inner_mode
5617 || (! mode_dependent_address_p (XEXP (inner, 0))
5618 && ! MEM_VOLATILE_P (inner))))))
5619 {
5620 int offset = 0;
5621
5622 /* The computations below will be correct if the machine is big
5623 endian in both bits and bytes or little endian in bits and bytes.
5624 If it is mixed, we must adjust. */
5625
5626 /* If bytes are big endian and we had a paradoxical SUBREG, we must
5627 adjust OFFSET to compensate. */
5628 if (BYTES_BIG_ENDIAN
5629 && ! spans_byte
5630 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5631 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5632
5633 /* If this is a constant position, we can move to the desired byte. */
5634 if (pos_rtx == 0)
5635 {
5636 offset += pos / BITS_PER_UNIT;
5637 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
5638 }
5639
5640 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5641 && ! spans_byte
5642 && is_mode != wanted_inner_mode)
5643 offset = (GET_MODE_SIZE (is_mode)
5644 - GET_MODE_SIZE (wanted_inner_mode) - offset);
5645
5646 if (offset != 0 || inner_mode != wanted_inner_mode)
5647 {
5648 rtx newmem = gen_rtx (MEM, wanted_inner_mode,
5649 plus_constant (XEXP (inner, 0), offset));
5650 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5651 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5652 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5653 inner = newmem;
5654 }
5655 }
5656
5657 /* If INNER is not memory, we can always get it into the proper mode. If we
5658 are changing its mode, POS must be a constant and smaller than the size
5659 of the new mode. */
5660 else if (GET_CODE (inner) != MEM)
5661 {
5662 if (GET_MODE (inner) != wanted_inner_mode
5663 && (pos_rtx != 0
5664 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
5665 return 0;
5666
5667 inner = force_to_mode (inner, wanted_inner_mode,
5668 pos_rtx
5669 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5670 ? GET_MODE_MASK (wanted_inner_mode)
5671 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5672 NULL_RTX, 0);
5673 }
5674
5675 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5676 have to zero extend. Otherwise, we can just use a SUBREG. */
5677 if (pos_rtx != 0
5678 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5679 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
5680 else if (pos_rtx != 0
5681 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5682 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5683
5684 /* Make POS_RTX unless we already have it and it is correct. If we don't
5685 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5686 be a CONST_INT. */
5687 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5688 pos_rtx = orig_pos_rtx;
5689
5690 else if (pos_rtx == 0)
5691 pos_rtx = GEN_INT (pos);
5692
5693 /* Make the required operation. See if we can use existing rtx. */
5694 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5695 extraction_mode, inner, GEN_INT (len), pos_rtx);
5696 if (! in_dest)
5697 new = gen_lowpart_for_combine (mode, new);
5698
5699 return new;
5700 }
5701 \f
5702 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5703 with any other operations in X. Return X without that shift if so. */
5704
5705 static rtx
5706 extract_left_shift (x, count)
5707 rtx x;
5708 int count;
5709 {
5710 enum rtx_code code = GET_CODE (x);
5711 enum machine_mode mode = GET_MODE (x);
5712 rtx tem;
5713
5714 switch (code)
5715 {
5716 case ASHIFT:
5717 /* This is the shift itself. If it is wide enough, we will return
5718 either the value being shifted if the shift count is equal to
5719 COUNT or a shift for the difference. */
5720 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5721 && INTVAL (XEXP (x, 1)) >= count)
5722 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5723 INTVAL (XEXP (x, 1)) - count);
5724 break;
5725
5726 case NEG: case NOT:
5727 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5728 return gen_unary (code, mode, mode, tem);
5729
5730 break;
5731
5732 case PLUS: case IOR: case XOR: case AND:
5733 /* If we can safely shift this constant and we find the inner shift,
5734 make a new operation. */
5735 if (GET_CODE (XEXP (x,1)) == CONST_INT
5736 && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count)) - 1) == 0
5737 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5738 return gen_binary (code, mode, tem,
5739 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5740
5741 break;
5742
5743 default:
5744 break;
5745 }
5746
5747 return 0;
5748 }
5749 \f
5750 /* Look at the expression rooted at X. Look for expressions
5751 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5752 Form these expressions.
5753
5754 Return the new rtx, usually just X.
5755
5756 Also, for machines like the Vax that don't have logical shift insns,
5757 try to convert logical to arithmetic shift operations in cases where
5758 they are equivalent. This undoes the canonicalizations to logical
5759 shifts done elsewhere.
5760
5761 We try, as much as possible, to re-use rtl expressions to save memory.
5762
5763 IN_CODE says what kind of expression we are processing. Normally, it is
5764 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
5765 being kludges), it is MEM. When processing the arguments of a comparison
5766 or a COMPARE against zero, it is COMPARE. */
5767
5768 static rtx
5769 make_compound_operation (x, in_code)
5770 rtx x;
5771 enum rtx_code in_code;
5772 {
5773 enum rtx_code code = GET_CODE (x);
5774 enum machine_mode mode = GET_MODE (x);
5775 int mode_width = GET_MODE_BITSIZE (mode);
5776 rtx rhs, lhs;
5777 enum rtx_code next_code;
5778 int i;
5779 rtx new = 0;
5780 rtx tem;
5781 char *fmt;
5782
5783 /* Select the code to be used in recursive calls. Once we are inside an
5784 address, we stay there. If we have a comparison, set to COMPARE,
5785 but once inside, go back to our default of SET. */
5786
5787 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
5788 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5789 && XEXP (x, 1) == const0_rtx) ? COMPARE
5790 : in_code == COMPARE ? SET : in_code);
5791
5792 /* Process depending on the code of this operation. If NEW is set
5793 non-zero, it will be returned. */
5794
5795 switch (code)
5796 {
5797 case ASHIFT:
5798 /* Convert shifts by constants into multiplications if inside
5799 an address. */
5800 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5801 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5802 && INTVAL (XEXP (x, 1)) >= 0)
5803 {
5804 new = make_compound_operation (XEXP (x, 0), next_code);
5805 new = gen_rtx_combine (MULT, mode, new,
5806 GEN_INT ((HOST_WIDE_INT) 1
5807 << INTVAL (XEXP (x, 1))));
5808 }
5809 break;
5810
5811 case AND:
5812 /* If the second operand is not a constant, we can't do anything
5813 with it. */
5814 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5815 break;
5816
5817 /* If the constant is a power of two minus one and the first operand
5818 is a logical right shift, make an extraction. */
5819 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5820 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5821 {
5822 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5823 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5824 0, in_code == COMPARE);
5825 }
5826
5827 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5828 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5829 && subreg_lowpart_p (XEXP (x, 0))
5830 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5831 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5832 {
5833 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5834 next_code);
5835 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5836 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5837 0, in_code == COMPARE);
5838 }
5839 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
5840 else if ((GET_CODE (XEXP (x, 0)) == XOR
5841 || GET_CODE (XEXP (x, 0)) == IOR)
5842 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5843 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5844 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5845 {
5846 /* Apply the distributive law, and then try to make extractions. */
5847 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5848 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5849 XEXP (x, 1)),
5850 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5851 XEXP (x, 1)));
5852 new = make_compound_operation (new, in_code);
5853 }
5854
5855 /* If we are have (and (rotate X C) M) and C is larger than the number
5856 of bits in M, this is an extraction. */
5857
5858 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5859 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5860 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5861 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5862 {
5863 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5864 new = make_extraction (mode, new,
5865 (GET_MODE_BITSIZE (mode)
5866 - INTVAL (XEXP (XEXP (x, 0), 1))),
5867 NULL_RTX, i, 1, 0, in_code == COMPARE);
5868 }
5869
5870 /* On machines without logical shifts, if the operand of the AND is
5871 a logical shift and our mask turns off all the propagated sign
5872 bits, we can replace the logical shift with an arithmetic shift. */
5873 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5874 && (lshr_optab->handlers[(int) mode].insn_code
5875 == CODE_FOR_nothing)
5876 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5877 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5878 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5879 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5880 && mode_width <= HOST_BITS_PER_WIDE_INT)
5881 {
5882 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5883
5884 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5885 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5886 SUBST (XEXP (x, 0),
5887 gen_rtx_combine (ASHIFTRT, mode,
5888 make_compound_operation (XEXP (XEXP (x, 0), 0),
5889 next_code),
5890 XEXP (XEXP (x, 0), 1)));
5891 }
5892
5893 /* If the constant is one less than a power of two, this might be
5894 representable by an extraction even if no shift is present.
5895 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5896 we are in a COMPARE. */
5897 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5898 new = make_extraction (mode,
5899 make_compound_operation (XEXP (x, 0),
5900 next_code),
5901 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
5902
5903 /* If we are in a comparison and this is an AND with a power of two,
5904 convert this into the appropriate bit extract. */
5905 else if (in_code == COMPARE
5906 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5907 new = make_extraction (mode,
5908 make_compound_operation (XEXP (x, 0),
5909 next_code),
5910 i, NULL_RTX, 1, 1, 0, 1);
5911
5912 break;
5913
5914 case LSHIFTRT:
5915 /* If the sign bit is known to be zero, replace this with an
5916 arithmetic shift. */
5917 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5918 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5919 && mode_width <= HOST_BITS_PER_WIDE_INT
5920 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
5921 {
5922 new = gen_rtx_combine (ASHIFTRT, mode,
5923 make_compound_operation (XEXP (x, 0),
5924 next_code),
5925 XEXP (x, 1));
5926 break;
5927 }
5928
5929 /* ... fall through ... */
5930
5931 case ASHIFTRT:
5932 lhs = XEXP (x, 0);
5933 rhs = XEXP (x, 1);
5934
5935 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5936 this is a SIGN_EXTRACT. */
5937 if (GET_CODE (rhs) == CONST_INT
5938 && GET_CODE (lhs) == ASHIFT
5939 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
5940 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
5941 {
5942 new = make_compound_operation (XEXP (lhs, 0), next_code);
5943 new = make_extraction (mode, new,
5944 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
5945 NULL_RTX, mode_width - INTVAL (rhs),
5946 code == LSHIFTRT, 0, in_code == COMPARE);
5947 }
5948
5949 /* See if we have operations between an ASHIFTRT and an ASHIFT.
5950 If so, try to merge the shifts into a SIGN_EXTEND. We could
5951 also do this for some cases of SIGN_EXTRACT, but it doesn't
5952 seem worth the effort; the case checked for occurs on Alpha. */
5953
5954 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
5955 && ! (GET_CODE (lhs) == SUBREG
5956 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
5957 && GET_CODE (rhs) == CONST_INT
5958 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
5959 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
5960 new = make_extraction (mode, make_compound_operation (new, next_code),
5961 0, NULL_RTX, mode_width - INTVAL (rhs),
5962 code == LSHIFTRT, 0, in_code == COMPARE);
5963
5964 break;
5965
5966 case SUBREG:
5967 /* Call ourselves recursively on the inner expression. If we are
5968 narrowing the object and it has a different RTL code from
5969 what it originally did, do this SUBREG as a force_to_mode. */
5970
5971 tem = make_compound_operation (SUBREG_REG (x), in_code);
5972 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5973 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5974 && subreg_lowpart_p (x))
5975 {
5976 rtx newer = force_to_mode (tem, mode,
5977 GET_MODE_MASK (mode), NULL_RTX, 0);
5978
5979 /* If we have something other than a SUBREG, we might have
5980 done an expansion, so rerun outselves. */
5981 if (GET_CODE (newer) != SUBREG)
5982 newer = make_compound_operation (newer, in_code);
5983
5984 return newer;
5985 }
5986 break;
5987
5988 default:
5989 break;
5990 }
5991
5992 if (new)
5993 {
5994 x = gen_lowpart_for_combine (mode, new);
5995 code = GET_CODE (x);
5996 }
5997
5998 /* Now recursively process each operand of this operation. */
5999 fmt = GET_RTX_FORMAT (code);
6000 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6001 if (fmt[i] == 'e')
6002 {
6003 new = make_compound_operation (XEXP (x, i), next_code);
6004 SUBST (XEXP (x, i), new);
6005 }
6006
6007 return x;
6008 }
6009 \f
6010 /* Given M see if it is a value that would select a field of bits
6011 within an item, but not the entire word. Return -1 if not.
6012 Otherwise, return the starting position of the field, where 0 is the
6013 low-order bit.
6014
6015 *PLEN is set to the length of the field. */
6016
6017 static int
6018 get_pos_from_mask (m, plen)
6019 unsigned HOST_WIDE_INT m;
6020 int *plen;
6021 {
6022 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6023 int pos = exact_log2 (m & - m);
6024
6025 if (pos < 0)
6026 return -1;
6027
6028 /* Now shift off the low-order zero bits and see if we have a power of
6029 two minus 1. */
6030 *plen = exact_log2 ((m >> pos) + 1);
6031
6032 if (*plen <= 0)
6033 return -1;
6034
6035 return pos;
6036 }
6037 \f
6038 /* See if X can be simplified knowing that we will only refer to it in
6039 MODE and will only refer to those bits that are nonzero in MASK.
6040 If other bits are being computed or if masking operations are done
6041 that select a superset of the bits in MASK, they can sometimes be
6042 ignored.
6043
6044 Return a possibly simplified expression, but always convert X to
6045 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
6046
6047 Also, if REG is non-zero and X is a register equal in value to REG,
6048 replace X with REG.
6049
6050 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6051 are all off in X. This is used when X will be complemented, by either
6052 NOT, NEG, or XOR. */
6053
6054 static rtx
6055 force_to_mode (x, mode, mask, reg, just_select)
6056 rtx x;
6057 enum machine_mode mode;
6058 unsigned HOST_WIDE_INT mask;
6059 rtx reg;
6060 int just_select;
6061 {
6062 enum rtx_code code = GET_CODE (x);
6063 int next_select = just_select || code == XOR || code == NOT || code == NEG;
6064 enum machine_mode op_mode;
6065 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6066 rtx op0, op1, temp;
6067
6068 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6069 code below will do the wrong thing since the mode of such an
6070 expression is VOIDmode.
6071
6072 Also do nothing if X is a CLOBBER; this can happen if X was
6073 the return value from a call to gen_lowpart_for_combine. */
6074 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6075 return x;
6076
6077 /* We want to perform the operation is its present mode unless we know
6078 that the operation is valid in MODE, in which case we do the operation
6079 in MODE. */
6080 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6081 && code_to_optab[(int) code] != 0
6082 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
6083 != CODE_FOR_nothing))
6084 ? mode : GET_MODE (x));
6085
6086 /* It is not valid to do a right-shift in a narrower mode
6087 than the one it came in with. */
6088 if ((code == LSHIFTRT || code == ASHIFTRT)
6089 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6090 op_mode = GET_MODE (x);
6091
6092 /* Truncate MASK to fit OP_MODE. */
6093 if (op_mode)
6094 mask &= GET_MODE_MASK (op_mode);
6095
6096 /* When we have an arithmetic operation, or a shift whose count we
6097 do not know, we need to assume that all bit the up to the highest-order
6098 bit in MASK will be needed. This is how we form such a mask. */
6099 if (op_mode)
6100 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6101 ? GET_MODE_MASK (op_mode)
6102 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
6103 else
6104 fuller_mask = ~ (HOST_WIDE_INT) 0;
6105
6106 /* Determine what bits of X are guaranteed to be (non)zero. */
6107 nonzero = nonzero_bits (x, mode);
6108
6109 /* If none of the bits in X are needed, return a zero. */
6110 if (! just_select && (nonzero & mask) == 0)
6111 return const0_rtx;
6112
6113 /* If X is a CONST_INT, return a new one. Do this here since the
6114 test below will fail. */
6115 if (GET_CODE (x) == CONST_INT)
6116 {
6117 HOST_WIDE_INT cval = INTVAL (x) & mask;
6118 int width = GET_MODE_BITSIZE (mode);
6119
6120 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6121 number, sign extend it. */
6122 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6123 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6124 cval |= (HOST_WIDE_INT) -1 << width;
6125
6126 return GEN_INT (cval);
6127 }
6128
6129 /* If X is narrower than MODE and we want all the bits in X's mode, just
6130 get X in the proper mode. */
6131 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6132 && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
6133 return gen_lowpart_for_combine (mode, x);
6134
6135 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6136 MASK are already known to be zero in X, we need not do anything. */
6137 if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
6138 return x;
6139
6140 switch (code)
6141 {
6142 case CLOBBER:
6143 /* If X is a (clobber (const_int)), return it since we know we are
6144 generating something that won't match. */
6145 return x;
6146
6147 case USE:
6148 /* X is a (use (mem ..)) that was made from a bit-field extraction that
6149 spanned the boundary of the MEM. If we are now masking so it is
6150 within that boundary, we don't need the USE any more. */
6151 if (! BITS_BIG_ENDIAN
6152 && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6153 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6154 break;
6155
6156 case SIGN_EXTEND:
6157 case ZERO_EXTEND:
6158 case ZERO_EXTRACT:
6159 case SIGN_EXTRACT:
6160 x = expand_compound_operation (x);
6161 if (GET_CODE (x) != code)
6162 return force_to_mode (x, mode, mask, reg, next_select);
6163 break;
6164
6165 case REG:
6166 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6167 || rtx_equal_p (reg, get_last_value (x))))
6168 x = reg;
6169 break;
6170
6171 case SUBREG:
6172 if (subreg_lowpart_p (x)
6173 /* We can ignore the effect of this SUBREG if it narrows the mode or
6174 if the constant masks to zero all the bits the mode doesn't
6175 have. */
6176 && ((GET_MODE_SIZE (GET_MODE (x))
6177 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6178 || (0 == (mask
6179 & GET_MODE_MASK (GET_MODE (x))
6180 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6181 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6182 break;
6183
6184 case AND:
6185 /* If this is an AND with a constant, convert it into an AND
6186 whose constant is the AND of that constant with MASK. If it
6187 remains an AND of MASK, delete it since it is redundant. */
6188
6189 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6190 {
6191 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6192 mask & INTVAL (XEXP (x, 1)));
6193
6194 /* If X is still an AND, see if it is an AND with a mask that
6195 is just some low-order bits. If so, and it is MASK, we don't
6196 need it. */
6197
6198 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6199 && INTVAL (XEXP (x, 1)) == mask)
6200 x = XEXP (x, 0);
6201
6202 /* If it remains an AND, try making another AND with the bits
6203 in the mode mask that aren't in MASK turned on. If the
6204 constant in the AND is wide enough, this might make a
6205 cheaper constant. */
6206
6207 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6208 && GET_MODE_MASK (GET_MODE (x)) != mask
6209 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6210 {
6211 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6212 | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
6213 int width = GET_MODE_BITSIZE (GET_MODE (x));
6214 rtx y;
6215
6216 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6217 number, sign extend it. */
6218 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6219 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6220 cval |= (HOST_WIDE_INT) -1 << width;
6221
6222 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6223 if (rtx_cost (y, SET) < rtx_cost (x, SET))
6224 x = y;
6225 }
6226
6227 break;
6228 }
6229
6230 goto binop;
6231
6232 case PLUS:
6233 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6234 low-order bits (as in an alignment operation) and FOO is already
6235 aligned to that boundary, mask C1 to that boundary as well.
6236 This may eliminate that PLUS and, later, the AND. */
6237
6238 {
6239 int width = GET_MODE_BITSIZE (mode);
6240 unsigned HOST_WIDE_INT smask = mask;
6241
6242 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6243 number, sign extend it. */
6244
6245 if (width < HOST_BITS_PER_WIDE_INT
6246 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6247 smask |= (HOST_WIDE_INT) -1 << width;
6248
6249 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6250 && exact_log2 (- smask) >= 0
6251 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
6252 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
6253 return force_to_mode (plus_constant (XEXP (x, 0),
6254 INTVAL (XEXP (x, 1)) & mask),
6255 mode, mask, reg, next_select);
6256 }
6257
6258 /* ... fall through ... */
6259
6260 case MINUS:
6261 case MULT:
6262 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6263 most significant bit in MASK since carries from those bits will
6264 affect the bits we are interested in. */
6265 mask = fuller_mask;
6266 goto binop;
6267
6268 case IOR:
6269 case XOR:
6270 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6271 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6272 operation which may be a bitfield extraction. Ensure that the
6273 constant we form is not wider than the mode of X. */
6274
6275 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6276 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6277 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6278 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6279 && GET_CODE (XEXP (x, 1)) == CONST_INT
6280 && ((INTVAL (XEXP (XEXP (x, 0), 1))
6281 + floor_log2 (INTVAL (XEXP (x, 1))))
6282 < GET_MODE_BITSIZE (GET_MODE (x)))
6283 && (INTVAL (XEXP (x, 1))
6284 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6285 {
6286 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6287 << INTVAL (XEXP (XEXP (x, 0), 1)));
6288 temp = gen_binary (GET_CODE (x), GET_MODE (x),
6289 XEXP (XEXP (x, 0), 0), temp);
6290 x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6291 XEXP (XEXP (x, 0), 1));
6292 return force_to_mode (x, mode, mask, reg, next_select);
6293 }
6294
6295 binop:
6296 /* For most binary operations, just propagate into the operation and
6297 change the mode if we have an operation of that mode. */
6298
6299 op0 = gen_lowpart_for_combine (op_mode,
6300 force_to_mode (XEXP (x, 0), mode, mask,
6301 reg, next_select));
6302 op1 = gen_lowpart_for_combine (op_mode,
6303 force_to_mode (XEXP (x, 1), mode, mask,
6304 reg, next_select));
6305
6306 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
6307 MASK since OP1 might have been sign-extended but we never want
6308 to turn on extra bits, since combine might have previously relied
6309 on them being off. */
6310 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
6311 && (INTVAL (op1) & mask) != 0)
6312 op1 = GEN_INT (INTVAL (op1) & mask);
6313
6314 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6315 x = gen_binary (code, op_mode, op0, op1);
6316 break;
6317
6318 case ASHIFT:
6319 /* For left shifts, do the same, but just for the first operand.
6320 However, we cannot do anything with shifts where we cannot
6321 guarantee that the counts are smaller than the size of the mode
6322 because such a count will have a different meaning in a
6323 wider mode. */
6324
6325 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6326 && INTVAL (XEXP (x, 1)) >= 0
6327 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6328 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6329 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
6330 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
6331 break;
6332
6333 /* If the shift count is a constant and we can do arithmetic in
6334 the mode of the shift, refine which bits we need. Otherwise, use the
6335 conservative form of the mask. */
6336 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6337 && INTVAL (XEXP (x, 1)) >= 0
6338 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6339 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6340 mask >>= INTVAL (XEXP (x, 1));
6341 else
6342 mask = fuller_mask;
6343
6344 op0 = gen_lowpart_for_combine (op_mode,
6345 force_to_mode (XEXP (x, 0), op_mode,
6346 mask, reg, next_select));
6347
6348 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6349 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
6350 break;
6351
6352 case LSHIFTRT:
6353 /* Here we can only do something if the shift count is a constant,
6354 this shift constant is valid for the host, and we can do arithmetic
6355 in OP_MODE. */
6356
6357 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6358 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6359 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6360 {
6361 rtx inner = XEXP (x, 0);
6362
6363 /* Select the mask of the bits we need for the shift operand. */
6364 mask <<= INTVAL (XEXP (x, 1));
6365
6366 /* We can only change the mode of the shift if we can do arithmetic
6367 in the mode of the shift and MASK is no wider than the width of
6368 OP_MODE. */
6369 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
6370 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
6371 op_mode = GET_MODE (x);
6372
6373 inner = force_to_mode (inner, op_mode, mask, reg, next_select);
6374
6375 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
6376 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
6377 }
6378
6379 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6380 shift and AND produces only copies of the sign bit (C2 is one less
6381 than a power of two), we can do this with just a shift. */
6382
6383 if (GET_CODE (x) == LSHIFTRT
6384 && GET_CODE (XEXP (x, 1)) == CONST_INT
6385 && ((INTVAL (XEXP (x, 1))
6386 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
6387 >= GET_MODE_BITSIZE (GET_MODE (x)))
6388 && exact_log2 (mask + 1) >= 0
6389 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6390 >= exact_log2 (mask + 1)))
6391 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6392 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
6393 - exact_log2 (mask + 1)));
6394 break;
6395
6396 case ASHIFTRT:
6397 /* If we are just looking for the sign bit, we don't need this shift at
6398 all, even if it has a variable count. */
6399 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6400 && (mask == ((HOST_WIDE_INT) 1
6401 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
6402 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6403
6404 /* If this is a shift by a constant, get a mask that contains those bits
6405 that are not copies of the sign bit. We then have two cases: If
6406 MASK only includes those bits, this can be a logical shift, which may
6407 allow simplifications. If MASK is a single-bit field not within
6408 those bits, we are requesting a copy of the sign bit and hence can
6409 shift the sign bit to the appropriate location. */
6410
6411 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
6412 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6413 {
6414 int i = -1;
6415
6416 /* If the considered data is wider then HOST_WIDE_INT, we can't
6417 represent a mask for all its bits in a single scalar.
6418 But we only care about the lower bits, so calculate these. */
6419
6420 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
6421 {
6422 nonzero = ~ (HOST_WIDE_INT) 0;
6423
6424 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6425 is the number of bits a full-width mask would have set.
6426 We need only shift if these are fewer than nonzero can
6427 hold. If not, we must keep all bits set in nonzero. */
6428
6429 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6430 < HOST_BITS_PER_WIDE_INT)
6431 nonzero >>= INTVAL (XEXP (x, 1))
6432 + HOST_BITS_PER_WIDE_INT
6433 - GET_MODE_BITSIZE (GET_MODE (x)) ;
6434 }
6435 else
6436 {
6437 nonzero = GET_MODE_MASK (GET_MODE (x));
6438 nonzero >>= INTVAL (XEXP (x, 1));
6439 }
6440
6441 if ((mask & ~ nonzero) == 0
6442 || (i = exact_log2 (mask)) >= 0)
6443 {
6444 x = simplify_shift_const
6445 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6446 i < 0 ? INTVAL (XEXP (x, 1))
6447 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
6448
6449 if (GET_CODE (x) != ASHIFTRT)
6450 return force_to_mode (x, mode, mask, reg, next_select);
6451 }
6452 }
6453
6454 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
6455 even if the shift count isn't a constant. */
6456 if (mask == 1)
6457 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
6458
6459 /* If this is a sign-extension operation that just affects bits
6460 we don't care about, remove it. Be sure the call above returned
6461 something that is still a shift. */
6462
6463 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
6464 && GET_CODE (XEXP (x, 1)) == CONST_INT
6465 && INTVAL (XEXP (x, 1)) >= 0
6466 && (INTVAL (XEXP (x, 1))
6467 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
6468 && GET_CODE (XEXP (x, 0)) == ASHIFT
6469 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6470 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
6471 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
6472 reg, next_select);
6473
6474 break;
6475
6476 case ROTATE:
6477 case ROTATERT:
6478 /* If the shift count is constant and we can do computations
6479 in the mode of X, compute where the bits we care about are.
6480 Otherwise, we can't do anything. Don't change the mode of
6481 the shift or propagate MODE into the shift, though. */
6482 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6483 && INTVAL (XEXP (x, 1)) >= 0)
6484 {
6485 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
6486 GET_MODE (x), GEN_INT (mask),
6487 XEXP (x, 1));
6488 if (temp && GET_CODE(temp) == CONST_INT)
6489 SUBST (XEXP (x, 0),
6490 force_to_mode (XEXP (x, 0), GET_MODE (x),
6491 INTVAL (temp), reg, next_select));
6492 }
6493 break;
6494
6495 case NEG:
6496 /* If we just want the low-order bit, the NEG isn't needed since it
6497 won't change the low-order bit. */
6498 if (mask == 1)
6499 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
6500
6501 /* We need any bits less significant than the most significant bit in
6502 MASK since carries from those bits will affect the bits we are
6503 interested in. */
6504 mask = fuller_mask;
6505 goto unop;
6506
6507 case NOT:
6508 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6509 same as the XOR case above. Ensure that the constant we form is not
6510 wider than the mode of X. */
6511
6512 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6513 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6514 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6515 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6516 < GET_MODE_BITSIZE (GET_MODE (x)))
6517 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6518 {
6519 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6520 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6521 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6522
6523 return force_to_mode (x, mode, mask, reg, next_select);
6524 }
6525
6526 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
6527 use the full mask inside the NOT. */
6528 mask = fuller_mask;
6529
6530 unop:
6531 op0 = gen_lowpart_for_combine (op_mode,
6532 force_to_mode (XEXP (x, 0), mode, mask,
6533 reg, next_select));
6534 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6535 x = gen_unary (code, op_mode, op_mode, op0);
6536 break;
6537
6538 case NE:
6539 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
6540 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
6541 which is equal to STORE_FLAG_VALUE. */
6542 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
6543 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
6544 && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
6545 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6546
6547 break;
6548
6549 case IF_THEN_ELSE:
6550 /* We have no way of knowing if the IF_THEN_ELSE can itself be
6551 written in a narrower mode. We play it safe and do not do so. */
6552
6553 SUBST (XEXP (x, 1),
6554 gen_lowpart_for_combine (GET_MODE (x),
6555 force_to_mode (XEXP (x, 1), mode,
6556 mask, reg, next_select)));
6557 SUBST (XEXP (x, 2),
6558 gen_lowpart_for_combine (GET_MODE (x),
6559 force_to_mode (XEXP (x, 2), mode,
6560 mask, reg,next_select)));
6561 break;
6562
6563 default:
6564 break;
6565 }
6566
6567 /* Ensure we return a value of the proper mode. */
6568 return gen_lowpart_for_combine (mode, x);
6569 }
6570 \f
6571 /* Return nonzero if X is an expression that has one of two values depending on
6572 whether some other value is zero or nonzero. In that case, we return the
6573 value that is being tested, *PTRUE is set to the value if the rtx being
6574 returned has a nonzero value, and *PFALSE is set to the other alternative.
6575
6576 If we return zero, we set *PTRUE and *PFALSE to X. */
6577
6578 static rtx
6579 if_then_else_cond (x, ptrue, pfalse)
6580 rtx x;
6581 rtx *ptrue, *pfalse;
6582 {
6583 enum machine_mode mode = GET_MODE (x);
6584 enum rtx_code code = GET_CODE (x);
6585 int size = GET_MODE_BITSIZE (mode);
6586 rtx cond0, cond1, true0, true1, false0, false1;
6587 unsigned HOST_WIDE_INT nz;
6588
6589 /* If this is a unary operation whose operand has one of two values, apply
6590 our opcode to compute those values. */
6591 if (GET_RTX_CLASS (code) == '1'
6592 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6593 {
6594 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6595 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
6596 return cond0;
6597 }
6598
6599 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
6600 make can't possibly match and would suppress other optimizations. */
6601 else if (code == COMPARE)
6602 ;
6603
6604 /* If this is a binary operation, see if either side has only one of two
6605 values. If either one does or if both do and they are conditional on
6606 the same value, compute the new true and false values. */
6607 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6608 || GET_RTX_CLASS (code) == '<')
6609 {
6610 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6611 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6612
6613 if ((cond0 != 0 || cond1 != 0)
6614 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6615 {
6616 /* If if_then_else_cond returned zero, then true/false are the
6617 same rtl. We must copy one of them to prevent invalid rtl
6618 sharing. */
6619 if (cond0 == 0)
6620 true0 = copy_rtx (true0);
6621 else if (cond1 == 0)
6622 true1 = copy_rtx (true1);
6623
6624 *ptrue = gen_binary (code, mode, true0, true1);
6625 *pfalse = gen_binary (code, mode, false0, false1);
6626 return cond0 ? cond0 : cond1;
6627 }
6628
6629 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
6630 operands is zero when the other is non-zero, and vice-versa,
6631 and STORE_FLAG_VALUE is 1 or -1. */
6632
6633 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6634 && (code == PLUS || code == IOR || code == XOR || code == MINUS
6635 || code == UMAX)
6636 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6637 {
6638 rtx op0 = XEXP (XEXP (x, 0), 1);
6639 rtx op1 = XEXP (XEXP (x, 1), 1);
6640
6641 cond0 = XEXP (XEXP (x, 0), 0);
6642 cond1 = XEXP (XEXP (x, 1), 0);
6643
6644 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6645 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6646 && reversible_comparison_p (cond1)
6647 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6648 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6649 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6650 || ((swap_condition (GET_CODE (cond0))
6651 == reverse_condition (GET_CODE (cond1)))
6652 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6653 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6654 && ! side_effects_p (x))
6655 {
6656 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6657 *pfalse = gen_binary (MULT, mode,
6658 (code == MINUS
6659 ? gen_unary (NEG, mode, mode, op1) : op1),
6660 const_true_rtx);
6661 return cond0;
6662 }
6663 }
6664
6665 /* Similarly for MULT, AND and UMIN, execpt that for these the result
6666 is always zero. */
6667 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6668 && (code == MULT || code == AND || code == UMIN)
6669 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6670 {
6671 cond0 = XEXP (XEXP (x, 0), 0);
6672 cond1 = XEXP (XEXP (x, 1), 0);
6673
6674 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6675 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6676 && reversible_comparison_p (cond1)
6677 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6678 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6679 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6680 || ((swap_condition (GET_CODE (cond0))
6681 == reverse_condition (GET_CODE (cond1)))
6682 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6683 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6684 && ! side_effects_p (x))
6685 {
6686 *ptrue = *pfalse = const0_rtx;
6687 return cond0;
6688 }
6689 }
6690 }
6691
6692 else if (code == IF_THEN_ELSE)
6693 {
6694 /* If we have IF_THEN_ELSE already, extract the condition and
6695 canonicalize it if it is NE or EQ. */
6696 cond0 = XEXP (x, 0);
6697 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6698 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6699 return XEXP (cond0, 0);
6700 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6701 {
6702 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6703 return XEXP (cond0, 0);
6704 }
6705 else
6706 return cond0;
6707 }
6708
6709 /* If X is a normal SUBREG with both inner and outer modes integral,
6710 we can narrow both the true and false values of the inner expression,
6711 if there is a condition. */
6712 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6713 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6714 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6715 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6716 &true0, &false0)))
6717 {
6718 *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6719 *pfalse
6720 = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6721
6722 return cond0;
6723 }
6724
6725 /* If X is a constant, this isn't special and will cause confusions
6726 if we treat it as such. Likewise if it is equivalent to a constant. */
6727 else if (CONSTANT_P (x)
6728 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6729 ;
6730
6731 /* If X is known to be either 0 or -1, those are the true and
6732 false values when testing X. */
6733 else if (num_sign_bit_copies (x, mode) == size)
6734 {
6735 *ptrue = constm1_rtx, *pfalse = const0_rtx;
6736 return x;
6737 }
6738
6739 /* Likewise for 0 or a single bit. */
6740 else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6741 {
6742 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6743 return x;
6744 }
6745
6746 /* Otherwise fail; show no condition with true and false values the same. */
6747 *ptrue = *pfalse = x;
6748 return 0;
6749 }
6750 \f
6751 /* Return the value of expression X given the fact that condition COND
6752 is known to be true when applied to REG as its first operand and VAL
6753 as its second. X is known to not be shared and so can be modified in
6754 place.
6755
6756 We only handle the simplest cases, and specifically those cases that
6757 arise with IF_THEN_ELSE expressions. */
6758
6759 static rtx
6760 known_cond (x, cond, reg, val)
6761 rtx x;
6762 enum rtx_code cond;
6763 rtx reg, val;
6764 {
6765 enum rtx_code code = GET_CODE (x);
6766 rtx temp;
6767 char *fmt;
6768 int i, j;
6769
6770 if (side_effects_p (x))
6771 return x;
6772
6773 if (cond == EQ && rtx_equal_p (x, reg))
6774 return val;
6775
6776 /* If X is (abs REG) and we know something about REG's relationship
6777 with zero, we may be able to simplify this. */
6778
6779 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6780 switch (cond)
6781 {
6782 case GE: case GT: case EQ:
6783 return XEXP (x, 0);
6784 case LT: case LE:
6785 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6786 XEXP (x, 0));
6787 default:
6788 break;
6789 }
6790
6791 /* The only other cases we handle are MIN, MAX, and comparisons if the
6792 operands are the same as REG and VAL. */
6793
6794 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6795 {
6796 if (rtx_equal_p (XEXP (x, 0), val))
6797 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6798
6799 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6800 {
6801 if (GET_RTX_CLASS (code) == '<')
6802 return (comparison_dominates_p (cond, code) ? const_true_rtx
6803 : (comparison_dominates_p (cond,
6804 reverse_condition (code))
6805 ? const0_rtx : x));
6806
6807 else if (code == SMAX || code == SMIN
6808 || code == UMIN || code == UMAX)
6809 {
6810 int unsignedp = (code == UMIN || code == UMAX);
6811
6812 if (code == SMAX || code == UMAX)
6813 cond = reverse_condition (cond);
6814
6815 switch (cond)
6816 {
6817 case GE: case GT:
6818 return unsignedp ? x : XEXP (x, 1);
6819 case LE: case LT:
6820 return unsignedp ? x : XEXP (x, 0);
6821 case GEU: case GTU:
6822 return unsignedp ? XEXP (x, 1) : x;
6823 case LEU: case LTU:
6824 return unsignedp ? XEXP (x, 0) : x;
6825 default:
6826 break;
6827 }
6828 }
6829 }
6830 }
6831
6832 fmt = GET_RTX_FORMAT (code);
6833 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6834 {
6835 if (fmt[i] == 'e')
6836 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6837 else if (fmt[i] == 'E')
6838 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6839 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6840 cond, reg, val));
6841 }
6842
6843 return x;
6844 }
6845 \f
6846 /* See if X and Y are equal for the purposes of seeing if we can rewrite an
6847 assignment as a field assignment. */
6848
6849 static int
6850 rtx_equal_for_field_assignment_p (x, y)
6851 rtx x;
6852 rtx y;
6853 {
6854 rtx last_x, last_y;
6855
6856 if (x == y || rtx_equal_p (x, y))
6857 return 1;
6858
6859 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
6860 return 0;
6861
6862 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
6863 Note that all SUBREGs of MEM are paradoxical; otherwise they
6864 would have been rewritten. */
6865 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
6866 && GET_CODE (SUBREG_REG (y)) == MEM
6867 && rtx_equal_p (SUBREG_REG (y),
6868 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
6869 return 1;
6870
6871 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
6872 && GET_CODE (SUBREG_REG (x)) == MEM
6873 && rtx_equal_p (SUBREG_REG (x),
6874 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
6875 return 1;
6876
6877 last_x = get_last_value (x);
6878 last_y = get_last_value (y);
6879
6880 return ((last_x != 0
6881 && GET_CODE (last_x) != CLOBBER
6882 && rtx_equal_for_field_assignment_p (last_x, y))
6883 || (last_y != 0
6884 && GET_CODE (last_y) != CLOBBER
6885 && rtx_equal_for_field_assignment_p (x, last_y))
6886 || (last_x != 0 && last_y != 0
6887 && GET_CODE (last_x) != CLOBBER
6888 && GET_CODE (last_y) != CLOBBER
6889 && rtx_equal_for_field_assignment_p (last_x, last_y)));
6890 }
6891 \f
6892 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
6893 Return that assignment if so.
6894
6895 We only handle the most common cases. */
6896
6897 static rtx
6898 make_field_assignment (x)
6899 rtx x;
6900 {
6901 rtx dest = SET_DEST (x);
6902 rtx src = SET_SRC (x);
6903 rtx assign;
6904 rtx rhs, lhs;
6905 HOST_WIDE_INT c1;
6906 int pos, len;
6907 rtx other;
6908 enum machine_mode mode;
6909
6910 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6911 a clear of a one-bit field. We will have changed it to
6912 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6913 for a SUBREG. */
6914
6915 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6916 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6917 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
6918 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
6919 {
6920 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6921 1, 1, 1, 0);
6922 if (assign != 0)
6923 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6924 return x;
6925 }
6926
6927 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6928 && subreg_lowpart_p (XEXP (src, 0))
6929 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6930 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6931 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6932 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
6933 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
6934 {
6935 assign = make_extraction (VOIDmode, dest, 0,
6936 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6937 1, 1, 1, 0);
6938 if (assign != 0)
6939 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6940 return x;
6941 }
6942
6943 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
6944 one-bit field. */
6945 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6946 && XEXP (XEXP (src, 0), 0) == const1_rtx
6947 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
6948 {
6949 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6950 1, 1, 1, 0);
6951 if (assign != 0)
6952 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
6953 return x;
6954 }
6955
6956 /* The other case we handle is assignments into a constant-position
6957 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
6958 a mask that has all one bits except for a group of zero bits and
6959 OTHER is known to have zeros where C1 has ones, this is such an
6960 assignment. Compute the position and length from C1. Shift OTHER
6961 to the appropriate position, force it to the required mode, and
6962 make the extraction. Check for the AND in both operands. */
6963
6964 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
6965 return x;
6966
6967 rhs = expand_compound_operation (XEXP (src, 0));
6968 lhs = expand_compound_operation (XEXP (src, 1));
6969
6970 if (GET_CODE (rhs) == AND
6971 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
6972 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
6973 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
6974 else if (GET_CODE (lhs) == AND
6975 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6976 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
6977 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
6978 else
6979 return x;
6980
6981 pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
6982 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
6983 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
6984 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
6985 return x;
6986
6987 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
6988 if (assign == 0)
6989 return x;
6990
6991 /* The mode to use for the source is the mode of the assignment, or of
6992 what is inside a possible STRICT_LOW_PART. */
6993 mode = (GET_CODE (assign) == STRICT_LOW_PART
6994 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
6995
6996 /* Shift OTHER right POS places and make it the source, restricting it
6997 to the proper length and mode. */
6998
6999 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7000 GET_MODE (src), other, pos),
7001 mode,
7002 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7003 ? GET_MODE_MASK (mode)
7004 : ((HOST_WIDE_INT) 1 << len) - 1,
7005 dest, 0);
7006
7007 return gen_rtx_combine (SET, VOIDmode, assign, src);
7008 }
7009 \f
7010 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7011 if so. */
7012
7013 static rtx
7014 apply_distributive_law (x)
7015 rtx x;
7016 {
7017 enum rtx_code code = GET_CODE (x);
7018 rtx lhs, rhs, other;
7019 rtx tem;
7020 enum rtx_code inner_code;
7021
7022 /* Distributivity is not true for floating point.
7023 It can change the value. So don't do it.
7024 -- rms and moshier@world.std.com. */
7025 if (FLOAT_MODE_P (GET_MODE (x)))
7026 return x;
7027
7028 /* The outer operation can only be one of the following: */
7029 if (code != IOR && code != AND && code != XOR
7030 && code != PLUS && code != MINUS)
7031 return x;
7032
7033 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7034
7035 /* If either operand is a primitive we can't do anything, so get out
7036 fast. */
7037 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7038 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7039 return x;
7040
7041 lhs = expand_compound_operation (lhs);
7042 rhs = expand_compound_operation (rhs);
7043 inner_code = GET_CODE (lhs);
7044 if (inner_code != GET_CODE (rhs))
7045 return x;
7046
7047 /* See if the inner and outer operations distribute. */
7048 switch (inner_code)
7049 {
7050 case LSHIFTRT:
7051 case ASHIFTRT:
7052 case AND:
7053 case IOR:
7054 /* These all distribute except over PLUS. */
7055 if (code == PLUS || code == MINUS)
7056 return x;
7057 break;
7058
7059 case MULT:
7060 if (code != PLUS && code != MINUS)
7061 return x;
7062 break;
7063
7064 case ASHIFT:
7065 /* This is also a multiply, so it distributes over everything. */
7066 break;
7067
7068 case SUBREG:
7069 /* Non-paradoxical SUBREGs distributes over all operations, provided
7070 the inner modes and word numbers are the same, this is an extraction
7071 of a low-order part, we don't convert an fp operation to int or
7072 vice versa, and we would not be converting a single-word
7073 operation into a multi-word operation. The latter test is not
7074 required, but it prevents generating unneeded multi-word operations.
7075 Some of the previous tests are redundant given the latter test, but
7076 are retained because they are required for correctness.
7077
7078 We produce the result slightly differently in this case. */
7079
7080 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7081 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
7082 || ! subreg_lowpart_p (lhs)
7083 || (GET_MODE_CLASS (GET_MODE (lhs))
7084 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7085 || (GET_MODE_SIZE (GET_MODE (lhs))
7086 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7087 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7088 return x;
7089
7090 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7091 SUBREG_REG (lhs), SUBREG_REG (rhs));
7092 return gen_lowpart_for_combine (GET_MODE (x), tem);
7093
7094 default:
7095 return x;
7096 }
7097
7098 /* Set LHS and RHS to the inner operands (A and B in the example
7099 above) and set OTHER to the common operand (C in the example).
7100 These is only one way to do this unless the inner operation is
7101 commutative. */
7102 if (GET_RTX_CLASS (inner_code) == 'c'
7103 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7104 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7105 else if (GET_RTX_CLASS (inner_code) == 'c'
7106 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7107 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7108 else if (GET_RTX_CLASS (inner_code) == 'c'
7109 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7110 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7111 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7112 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7113 else
7114 return x;
7115
7116 /* Form the new inner operation, seeing if it simplifies first. */
7117 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7118
7119 /* There is one exception to the general way of distributing:
7120 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
7121 if (code == XOR && inner_code == IOR)
7122 {
7123 inner_code = AND;
7124 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
7125 }
7126
7127 /* We may be able to continuing distributing the result, so call
7128 ourselves recursively on the inner operation before forming the
7129 outer operation, which we return. */
7130 return gen_binary (inner_code, GET_MODE (x),
7131 apply_distributive_law (tem), other);
7132 }
7133 \f
7134 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7135 in MODE.
7136
7137 Return an equivalent form, if different from X. Otherwise, return X. If
7138 X is zero, we are to always construct the equivalent form. */
7139
7140 static rtx
7141 simplify_and_const_int (x, mode, varop, constop)
7142 rtx x;
7143 enum machine_mode mode;
7144 rtx varop;
7145 unsigned HOST_WIDE_INT constop;
7146 {
7147 unsigned HOST_WIDE_INT nonzero;
7148 int width = GET_MODE_BITSIZE (mode);
7149 int i;
7150
7151 /* Simplify VAROP knowing that we will be only looking at some of the
7152 bits in it. */
7153 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7154
7155 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7156 CONST_INT, we are done. */
7157 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7158 return varop;
7159
7160 /* See what bits may be nonzero in VAROP. Unlike the general case of
7161 a call to nonzero_bits, here we don't care about bits outside
7162 MODE. */
7163
7164 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7165
7166 /* If this would be an entire word for the target, but is not for
7167 the host, then sign-extend on the host so that the number will look
7168 the same way on the host that it would on the target.
7169
7170 For example, when building a 64 bit alpha hosted 32 bit sparc
7171 targeted compiler, then we want the 32 bit unsigned value -1 to be
7172 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7173 The later confuses the sparc backend. */
7174
7175 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7176 && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
7177 nonzero |= ((HOST_WIDE_INT) (-1) << width);
7178
7179 /* Turn off all bits in the constant that are known to already be zero.
7180 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7181 which is tested below. */
7182
7183 constop &= nonzero;
7184
7185 /* If we don't have any bits left, return zero. */
7186 if (constop == 0)
7187 return const0_rtx;
7188
7189 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7190 a power of two, we can replace this with a ASHIFT. */
7191 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7192 && (i = exact_log2 (constop)) >= 0)
7193 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7194
7195 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7196 or XOR, then try to apply the distributive law. This may eliminate
7197 operations if either branch can be simplified because of the AND.
7198 It may also make some cases more complex, but those cases probably
7199 won't match a pattern either with or without this. */
7200
7201 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7202 return
7203 gen_lowpart_for_combine
7204 (mode,
7205 apply_distributive_law
7206 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7207 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7208 XEXP (varop, 0), constop),
7209 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7210 XEXP (varop, 1), constop))));
7211
7212 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7213 if we already had one (just check for the simplest cases). */
7214 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7215 && GET_MODE (XEXP (x, 0)) == mode
7216 && SUBREG_REG (XEXP (x, 0)) == varop)
7217 varop = XEXP (x, 0);
7218 else
7219 varop = gen_lowpart_for_combine (mode, varop);
7220
7221 /* If we can't make the SUBREG, try to return what we were given. */
7222 if (GET_CODE (varop) == CLOBBER)
7223 return x ? x : varop;
7224
7225 /* If we are only masking insignificant bits, return VAROP. */
7226 if (constop == nonzero)
7227 x = varop;
7228
7229 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7230 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
7231 x = gen_binary (AND, mode, varop, GEN_INT (constop));
7232
7233 else
7234 {
7235 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7236 || INTVAL (XEXP (x, 1)) != constop)
7237 SUBST (XEXP (x, 1), GEN_INT (constop));
7238
7239 SUBST (XEXP (x, 0), varop);
7240 }
7241
7242 return x;
7243 }
7244 \f
7245 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7246 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7247 is less useful. We can't allow both, because that results in exponential
7248 run time recursion. There is a nullstone testcase that triggered
7249 this. This macro avoids accidental uses of num_sign_bit_copies. */
7250 #define num_sign_bit_copies()
7251
7252 /* Given an expression, X, compute which bits in X can be non-zero.
7253 We don't care about bits outside of those defined in MODE.
7254
7255 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
7256 a shift, AND, or zero_extract, we can do better. */
7257
7258 static unsigned HOST_WIDE_INT
7259 nonzero_bits (x, mode)
7260 rtx x;
7261 enum machine_mode mode;
7262 {
7263 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7264 unsigned HOST_WIDE_INT inner_nz;
7265 enum rtx_code code;
7266 int mode_width = GET_MODE_BITSIZE (mode);
7267 rtx tem;
7268
7269 /* For floating-point values, assume all bits are needed. */
7270 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7271 return nonzero;
7272
7273 /* If X is wider than MODE, use its mode instead. */
7274 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7275 {
7276 mode = GET_MODE (x);
7277 nonzero = GET_MODE_MASK (mode);
7278 mode_width = GET_MODE_BITSIZE (mode);
7279 }
7280
7281 if (mode_width > HOST_BITS_PER_WIDE_INT)
7282 /* Our only callers in this case look for single bit values. So
7283 just return the mode mask. Those tests will then be false. */
7284 return nonzero;
7285
7286 #ifndef WORD_REGISTER_OPERATIONS
7287 /* If MODE is wider than X, but both are a single word for both the host
7288 and target machines, we can compute this from which bits of the
7289 object might be nonzero in its own mode, taking into account the fact
7290 that on many CISC machines, accessing an object in a wider mode
7291 causes the high-order bits to become undefined. So they are
7292 not known to be zero. */
7293
7294 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7295 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7296 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7297 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
7298 {
7299 nonzero &= nonzero_bits (x, GET_MODE (x));
7300 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
7301 return nonzero;
7302 }
7303 #endif
7304
7305 code = GET_CODE (x);
7306 switch (code)
7307 {
7308 case REG:
7309 #ifdef POINTERS_EXTEND_UNSIGNED
7310 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7311 all the bits above ptr_mode are known to be zero. */
7312 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
7313 && REGNO_POINTER_FLAG (REGNO (x)))
7314 nonzero &= GET_MODE_MASK (ptr_mode);
7315 #endif
7316
7317 #ifdef STACK_BOUNDARY
7318 /* If this is the stack pointer, we may know something about its
7319 alignment. If PUSH_ROUNDING is defined, it is possible for the
7320 stack to be momentarily aligned only to that amount, so we pick
7321 the least alignment. */
7322
7323 /* We can't check for arg_pointer_rtx here, because it is not
7324 guaranteed to have as much alignment as the stack pointer.
7325 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7326 alignment but the argument pointer has only 64 bit alignment. */
7327
7328 if (x == stack_pointer_rtx || x == frame_pointer_rtx
7329 || x == hard_frame_pointer_rtx
7330 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7331 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
7332 {
7333 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7334
7335 #ifdef PUSH_ROUNDING
7336 if (REGNO (x) == STACK_POINTER_REGNUM)
7337 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
7338 #endif
7339
7340 /* We must return here, otherwise we may get a worse result from
7341 one of the choices below. There is nothing useful below as
7342 far as the stack pointer is concerned. */
7343 return nonzero &= ~ (sp_alignment - 1);
7344 }
7345 #endif
7346
7347 /* If X is a register whose nonzero bits value is current, use it.
7348 Otherwise, if X is a register whose value we can find, use that
7349 value. Otherwise, use the previously-computed global nonzero bits
7350 for this register. */
7351
7352 if (reg_last_set_value[REGNO (x)] != 0
7353 && reg_last_set_mode[REGNO (x)] == mode
7354 && (REG_N_SETS (REGNO (x)) == 1
7355 || reg_last_set_label[REGNO (x)] == label_tick)
7356 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7357 return reg_last_set_nonzero_bits[REGNO (x)];
7358
7359 tem = get_last_value (x);
7360
7361 if (tem)
7362 {
7363 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7364 /* If X is narrower than MODE and TEM is a non-negative
7365 constant that would appear negative in the mode of X,
7366 sign-extend it for use in reg_nonzero_bits because some
7367 machines (maybe most) will actually do the sign-extension
7368 and this is the conservative approach.
7369
7370 ??? For 2.5, try to tighten up the MD files in this regard
7371 instead of this kludge. */
7372
7373 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7374 && GET_CODE (tem) == CONST_INT
7375 && INTVAL (tem) > 0
7376 && 0 != (INTVAL (tem)
7377 & ((HOST_WIDE_INT) 1
7378 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7379 tem = GEN_INT (INTVAL (tem)
7380 | ((HOST_WIDE_INT) (-1)
7381 << GET_MODE_BITSIZE (GET_MODE (x))));
7382 #endif
7383 return nonzero_bits (tem, mode);
7384 }
7385 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7386 return reg_nonzero_bits[REGNO (x)] & nonzero;
7387 else
7388 return nonzero;
7389
7390 case CONST_INT:
7391 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7392 /* If X is negative in MODE, sign-extend the value. */
7393 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
7394 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
7395 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
7396 #endif
7397
7398 return INTVAL (x);
7399
7400 case MEM:
7401 #ifdef LOAD_EXTEND_OP
7402 /* In many, if not most, RISC machines, reading a byte from memory
7403 zeros the rest of the register. Noticing that fact saves a lot
7404 of extra zero-extends. */
7405 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
7406 nonzero &= GET_MODE_MASK (GET_MODE (x));
7407 #endif
7408 break;
7409
7410 case EQ: case NE:
7411 case GT: case GTU:
7412 case LT: case LTU:
7413 case GE: case GEU:
7414 case LE: case LEU:
7415
7416 /* If this produces an integer result, we know which bits are set.
7417 Code here used to clear bits outside the mode of X, but that is
7418 now done above. */
7419
7420 if (GET_MODE_CLASS (mode) == MODE_INT
7421 && mode_width <= HOST_BITS_PER_WIDE_INT)
7422 nonzero = STORE_FLAG_VALUE;
7423 break;
7424
7425 case NEG:
7426 #if 0
7427 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7428 and num_sign_bit_copies. */
7429 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7430 == GET_MODE_BITSIZE (GET_MODE (x)))
7431 nonzero = 1;
7432 #endif
7433
7434 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
7435 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
7436 break;
7437
7438 case ABS:
7439 #if 0
7440 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7441 and num_sign_bit_copies. */
7442 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7443 == GET_MODE_BITSIZE (GET_MODE (x)))
7444 nonzero = 1;
7445 #endif
7446 break;
7447
7448 case TRUNCATE:
7449 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
7450 break;
7451
7452 case ZERO_EXTEND:
7453 nonzero &= nonzero_bits (XEXP (x, 0), mode);
7454 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7455 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7456 break;
7457
7458 case SIGN_EXTEND:
7459 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
7460 Otherwise, show all the bits in the outer mode but not the inner
7461 may be non-zero. */
7462 inner_nz = nonzero_bits (XEXP (x, 0), mode);
7463 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7464 {
7465 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7466 if (inner_nz
7467 & (((HOST_WIDE_INT) 1
7468 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
7469 inner_nz |= (GET_MODE_MASK (mode)
7470 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
7471 }
7472
7473 nonzero &= inner_nz;
7474 break;
7475
7476 case AND:
7477 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7478 & nonzero_bits (XEXP (x, 1), mode));
7479 break;
7480
7481 case XOR: case IOR:
7482 case UMIN: case UMAX: case SMIN: case SMAX:
7483 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7484 | nonzero_bits (XEXP (x, 1), mode));
7485 break;
7486
7487 case PLUS: case MINUS:
7488 case MULT:
7489 case DIV: case UDIV:
7490 case MOD: case UMOD:
7491 /* We can apply the rules of arithmetic to compute the number of
7492 high- and low-order zero bits of these operations. We start by
7493 computing the width (position of the highest-order non-zero bit)
7494 and the number of low-order zero bits for each value. */
7495 {
7496 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
7497 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
7498 int width0 = floor_log2 (nz0) + 1;
7499 int width1 = floor_log2 (nz1) + 1;
7500 int low0 = floor_log2 (nz0 & -nz0);
7501 int low1 = floor_log2 (nz1 & -nz1);
7502 HOST_WIDE_INT op0_maybe_minusp
7503 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7504 HOST_WIDE_INT op1_maybe_minusp
7505 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7506 int result_width = mode_width;
7507 int result_low = 0;
7508
7509 switch (code)
7510 {
7511 case PLUS:
7512 result_width = MAX (width0, width1) + 1;
7513 result_low = MIN (low0, low1);
7514 break;
7515 case MINUS:
7516 result_low = MIN (low0, low1);
7517 break;
7518 case MULT:
7519 result_width = width0 + width1;
7520 result_low = low0 + low1;
7521 break;
7522 case DIV:
7523 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7524 result_width = width0;
7525 break;
7526 case UDIV:
7527 result_width = width0;
7528 break;
7529 case MOD:
7530 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7531 result_width = MIN (width0, width1);
7532 result_low = MIN (low0, low1);
7533 break;
7534 case UMOD:
7535 result_width = MIN (width0, width1);
7536 result_low = MIN (low0, low1);
7537 break;
7538 default:
7539 abort ();
7540 }
7541
7542 if (result_width < mode_width)
7543 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
7544
7545 if (result_low > 0)
7546 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
7547 }
7548 break;
7549
7550 case ZERO_EXTRACT:
7551 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7552 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7553 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
7554 break;
7555
7556 case SUBREG:
7557 /* If this is a SUBREG formed for a promoted variable that has
7558 been zero-extended, we know that at least the high-order bits
7559 are zero, though others might be too. */
7560
7561 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
7562 nonzero = (GET_MODE_MASK (GET_MODE (x))
7563 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
7564
7565 /* If the inner mode is a single word for both the host and target
7566 machines, we can compute this from which bits of the inner
7567 object might be nonzero. */
7568 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
7569 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7570 <= HOST_BITS_PER_WIDE_INT))
7571 {
7572 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
7573
7574 #ifndef WORD_REGISTER_OPERATIONS
7575 /* On many CISC machines, accessing an object in a wider mode
7576 causes the high-order bits to become undefined. So they are
7577 not known to be zero. */
7578 if (GET_MODE_SIZE (GET_MODE (x))
7579 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7580 nonzero |= (GET_MODE_MASK (GET_MODE (x))
7581 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
7582 #endif
7583 }
7584 break;
7585
7586 case ASHIFTRT:
7587 case LSHIFTRT:
7588 case ASHIFT:
7589 case ROTATE:
7590 /* The nonzero bits are in two classes: any bits within MODE
7591 that aren't in GET_MODE (x) are always significant. The rest of the
7592 nonzero bits are those that are significant in the operand of
7593 the shift when shifted the appropriate number of bits. This
7594 shows that high-order bits are cleared by the right shift and
7595 low-order bits by left shifts. */
7596 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7597 && INTVAL (XEXP (x, 1)) >= 0
7598 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7599 {
7600 enum machine_mode inner_mode = GET_MODE (x);
7601 int width = GET_MODE_BITSIZE (inner_mode);
7602 int count = INTVAL (XEXP (x, 1));
7603 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
7604 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
7605 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
7606 unsigned HOST_WIDE_INT outer = 0;
7607
7608 if (mode_width > width)
7609 outer = (op_nonzero & nonzero & ~ mode_mask);
7610
7611 if (code == LSHIFTRT)
7612 inner >>= count;
7613 else if (code == ASHIFTRT)
7614 {
7615 inner >>= count;
7616
7617 /* If the sign bit may have been nonzero before the shift, we
7618 need to mark all the places it could have been copied to
7619 by the shift as possibly nonzero. */
7620 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7621 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
7622 }
7623 else if (code == ASHIFT)
7624 inner <<= count;
7625 else
7626 inner = ((inner << (count % width)
7627 | (inner >> (width - (count % width)))) & mode_mask);
7628
7629 nonzero &= (outer | inner);
7630 }
7631 break;
7632
7633 case FFS:
7634 /* This is at most the number of bits in the mode. */
7635 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
7636 break;
7637
7638 case IF_THEN_ELSE:
7639 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7640 | nonzero_bits (XEXP (x, 2), mode));
7641 break;
7642
7643 default:
7644 break;
7645 }
7646
7647 return nonzero;
7648 }
7649
7650 /* See the macro definition above. */
7651 #undef num_sign_bit_copies
7652 \f
7653 /* Return the number of bits at the high-order end of X that are known to
7654 be equal to the sign bit. X will be used in mode MODE; if MODE is
7655 VOIDmode, X will be used in its own mode. The returned value will always
7656 be between 1 and the number of bits in MODE. */
7657
7658 static int
7659 num_sign_bit_copies (x, mode)
7660 rtx x;
7661 enum machine_mode mode;
7662 {
7663 enum rtx_code code = GET_CODE (x);
7664 int bitwidth;
7665 int num0, num1, result;
7666 unsigned HOST_WIDE_INT nonzero;
7667 rtx tem;
7668
7669 /* If we weren't given a mode, use the mode of X. If the mode is still
7670 VOIDmode, we don't know anything. Likewise if one of the modes is
7671 floating-point. */
7672
7673 if (mode == VOIDmode)
7674 mode = GET_MODE (x);
7675
7676 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
7677 return 1;
7678
7679 bitwidth = GET_MODE_BITSIZE (mode);
7680
7681 /* For a smaller object, just ignore the high bits. */
7682 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7683 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7684 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7685
7686 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7687 {
7688 #ifndef WORD_REGISTER_OPERATIONS
7689 /* If this machine does not do all register operations on the entire
7690 register and MODE is wider than the mode of X, we can say nothing
7691 at all about the high-order bits. */
7692 return 1;
7693 #else
7694 /* Likewise on machines that do, if the mode of the object is smaller
7695 than a word and loads of that size don't sign extend, we can say
7696 nothing about the high order bits. */
7697 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
7698 #ifdef LOAD_EXTEND_OP
7699 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
7700 #endif
7701 )
7702 return 1;
7703 #endif
7704 }
7705
7706 switch (code)
7707 {
7708 case REG:
7709
7710 #ifdef POINTERS_EXTEND_UNSIGNED
7711 /* If pointers extend signed and this is a pointer in Pmode, say that
7712 all the bits above ptr_mode are known to be sign bit copies. */
7713 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7714 && REGNO_POINTER_FLAG (REGNO (x)))
7715 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7716 #endif
7717
7718 if (reg_last_set_value[REGNO (x)] != 0
7719 && reg_last_set_mode[REGNO (x)] == mode
7720 && (REG_N_SETS (REGNO (x)) == 1
7721 || reg_last_set_label[REGNO (x)] == label_tick)
7722 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7723 return reg_last_set_sign_bit_copies[REGNO (x)];
7724
7725 tem = get_last_value (x);
7726 if (tem != 0)
7727 return num_sign_bit_copies (tem, mode);
7728
7729 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7730 return reg_sign_bit_copies[REGNO (x)];
7731 break;
7732
7733 case MEM:
7734 #ifdef LOAD_EXTEND_OP
7735 /* Some RISC machines sign-extend all loads of smaller than a word. */
7736 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7737 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
7738 #endif
7739 break;
7740
7741 case CONST_INT:
7742 /* If the constant is negative, take its 1's complement and remask.
7743 Then see how many zero bits we have. */
7744 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
7745 if (bitwidth <= HOST_BITS_PER_WIDE_INT
7746 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7747 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
7748
7749 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
7750
7751 case SUBREG:
7752 /* If this is a SUBREG for a promoted object that is sign-extended
7753 and we are looking at it in a wider mode, we know that at least the
7754 high-order bits are known to be sign bit copies. */
7755
7756 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
7757 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7758 num_sign_bit_copies (SUBREG_REG (x), mode));
7759
7760 /* For a smaller object, just ignore the high bits. */
7761 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7762 {
7763 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7764 return MAX (1, (num0
7765 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7766 - bitwidth)));
7767 }
7768
7769 #ifdef WORD_REGISTER_OPERATIONS
7770 #ifdef LOAD_EXTEND_OP
7771 /* For paradoxical SUBREGs on machines where all register operations
7772 affect the entire register, just look inside. Note that we are
7773 passing MODE to the recursive call, so the number of sign bit copies
7774 will remain relative to that mode, not the inner mode. */
7775
7776 /* This works only if loads sign extend. Otherwise, if we get a
7777 reload for the inner part, it may be loaded from the stack, and
7778 then we lose all sign bit copies that existed before the store
7779 to the stack. */
7780
7781 if ((GET_MODE_SIZE (GET_MODE (x))
7782 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7783 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
7784 return num_sign_bit_copies (SUBREG_REG (x), mode);
7785 #endif
7786 #endif
7787 break;
7788
7789 case SIGN_EXTRACT:
7790 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7791 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7792 break;
7793
7794 case SIGN_EXTEND:
7795 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7796 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7797
7798 case TRUNCATE:
7799 /* For a smaller object, just ignore the high bits. */
7800 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7801 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7802 - bitwidth)));
7803
7804 case NOT:
7805 return num_sign_bit_copies (XEXP (x, 0), mode);
7806
7807 case ROTATE: case ROTATERT:
7808 /* If we are rotating left by a number of bits less than the number
7809 of sign bit copies, we can just subtract that amount from the
7810 number. */
7811 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7812 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7813 {
7814 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7815 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7816 : bitwidth - INTVAL (XEXP (x, 1))));
7817 }
7818 break;
7819
7820 case NEG:
7821 /* In general, this subtracts one sign bit copy. But if the value
7822 is known to be positive, the number of sign bit copies is the
7823 same as that of the input. Finally, if the input has just one bit
7824 that might be nonzero, all the bits are copies of the sign bit. */
7825 nonzero = nonzero_bits (XEXP (x, 0), mode);
7826 if (nonzero == 1)
7827 return bitwidth;
7828
7829 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7830 if (num0 > 1
7831 && bitwidth <= HOST_BITS_PER_WIDE_INT
7832 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
7833 num0--;
7834
7835 return num0;
7836
7837 case IOR: case AND: case XOR:
7838 case SMIN: case SMAX: case UMIN: case UMAX:
7839 /* Logical operations will preserve the number of sign-bit copies.
7840 MIN and MAX operations always return one of the operands. */
7841 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7842 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7843 return MIN (num0, num1);
7844
7845 case PLUS: case MINUS:
7846 /* For addition and subtraction, we can have a 1-bit carry. However,
7847 if we are subtracting 1 from a positive number, there will not
7848 be such a carry. Furthermore, if the positive number is known to
7849 be 0 or 1, we know the result is either -1 or 0. */
7850
7851 if (code == PLUS && XEXP (x, 1) == constm1_rtx
7852 && bitwidth <= HOST_BITS_PER_WIDE_INT)
7853 {
7854 nonzero = nonzero_bits (XEXP (x, 0), mode);
7855 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7856 return (nonzero == 1 || nonzero == 0 ? bitwidth
7857 : bitwidth - floor_log2 (nonzero) - 1);
7858 }
7859
7860 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7861 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7862 return MAX (1, MIN (num0, num1) - 1);
7863
7864 case MULT:
7865 /* The number of bits of the product is the sum of the number of
7866 bits of both terms. However, unless one of the terms if known
7867 to be positive, we must allow for an additional bit since negating
7868 a negative number can remove one sign bit copy. */
7869
7870 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7871 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7872
7873 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7874 if (result > 0
7875 && bitwidth <= HOST_BITS_PER_WIDE_INT
7876 && ((nonzero_bits (XEXP (x, 0), mode)
7877 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7878 && ((nonzero_bits (XEXP (x, 1), mode)
7879 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
7880 result--;
7881
7882 return MAX (1, result);
7883
7884 case UDIV:
7885 /* The result must be <= the first operand. */
7886 return num_sign_bit_copies (XEXP (x, 0), mode);
7887
7888 case UMOD:
7889 /* The result must be <= the scond operand. */
7890 return num_sign_bit_copies (XEXP (x, 1), mode);
7891
7892 case DIV:
7893 /* Similar to unsigned division, except that we have to worry about
7894 the case where the divisor is negative, in which case we have
7895 to add 1. */
7896 result = num_sign_bit_copies (XEXP (x, 0), mode);
7897 if (result > 1
7898 && bitwidth <= HOST_BITS_PER_WIDE_INT
7899 && (nonzero_bits (XEXP (x, 1), mode)
7900 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7901 result --;
7902
7903 return result;
7904
7905 case MOD:
7906 result = num_sign_bit_copies (XEXP (x, 1), mode);
7907 if (result > 1
7908 && bitwidth <= HOST_BITS_PER_WIDE_INT
7909 && (nonzero_bits (XEXP (x, 1), mode)
7910 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7911 result --;
7912
7913 return result;
7914
7915 case ASHIFTRT:
7916 /* Shifts by a constant add to the number of bits equal to the
7917 sign bit. */
7918 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7919 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7920 && INTVAL (XEXP (x, 1)) > 0)
7921 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
7922
7923 return num0;
7924
7925 case ASHIFT:
7926 /* Left shifts destroy copies. */
7927 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7928 || INTVAL (XEXP (x, 1)) < 0
7929 || INTVAL (XEXP (x, 1)) >= bitwidth)
7930 return 1;
7931
7932 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7933 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
7934
7935 case IF_THEN_ELSE:
7936 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
7937 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
7938 return MIN (num0, num1);
7939
7940 case EQ: case NE: case GE: case GT: case LE: case LT:
7941 case GEU: case GTU: case LEU: case LTU:
7942 if (STORE_FLAG_VALUE == -1)
7943 return bitwidth;
7944 break;
7945
7946 default:
7947 break;
7948 }
7949
7950 /* If we haven't been able to figure it out by one of the above rules,
7951 see if some of the high-order bits are known to be zero. If so,
7952 count those bits and return one less than that amount. If we can't
7953 safely compute the mask for this mode, always return BITWIDTH. */
7954
7955 if (bitwidth > HOST_BITS_PER_WIDE_INT)
7956 return 1;
7957
7958 nonzero = nonzero_bits (x, mode);
7959 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
7960 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
7961 }
7962 \f
7963 /* Return the number of "extended" bits there are in X, when interpreted
7964 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7965 unsigned quantities, this is the number of high-order zero bits.
7966 For signed quantities, this is the number of copies of the sign bit
7967 minus 1. In both case, this function returns the number of "spare"
7968 bits. For example, if two quantities for which this function returns
7969 at least 1 are added, the addition is known not to overflow.
7970
7971 This function will always return 0 unless called during combine, which
7972 implies that it must be called from a define_split. */
7973
7974 int
7975 extended_count (x, mode, unsignedp)
7976 rtx x;
7977 enum machine_mode mode;
7978 int unsignedp;
7979 {
7980 if (nonzero_sign_valid == 0)
7981 return 0;
7982
7983 return (unsignedp
7984 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7985 && (GET_MODE_BITSIZE (mode) - 1
7986 - floor_log2 (nonzero_bits (x, mode))))
7987 : num_sign_bit_copies (x, mode) - 1);
7988 }
7989 \f
7990 /* This function is called from `simplify_shift_const' to merge two
7991 outer operations. Specifically, we have already found that we need
7992 to perform operation *POP0 with constant *PCONST0 at the outermost
7993 position. We would now like to also perform OP1 with constant CONST1
7994 (with *POP0 being done last).
7995
7996 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7997 the resulting operation. *PCOMP_P is set to 1 if we would need to
7998 complement the innermost operand, otherwise it is unchanged.
7999
8000 MODE is the mode in which the operation will be done. No bits outside
8001 the width of this mode matter. It is assumed that the width of this mode
8002 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8003
8004 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
8005 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8006 result is simply *PCONST0.
8007
8008 If the resulting operation cannot be expressed as one operation, we
8009 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8010
8011 static int
8012 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8013 enum rtx_code *pop0;
8014 HOST_WIDE_INT *pconst0;
8015 enum rtx_code op1;
8016 HOST_WIDE_INT const1;
8017 enum machine_mode mode;
8018 int *pcomp_p;
8019 {
8020 enum rtx_code op0 = *pop0;
8021 HOST_WIDE_INT const0 = *pconst0;
8022 int width = GET_MODE_BITSIZE (mode);
8023
8024 const0 &= GET_MODE_MASK (mode);
8025 const1 &= GET_MODE_MASK (mode);
8026
8027 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8028 if (op0 == AND)
8029 const1 &= const0;
8030
8031 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8032 if OP0 is SET. */
8033
8034 if (op1 == NIL || op0 == SET)
8035 return 1;
8036
8037 else if (op0 == NIL)
8038 op0 = op1, const0 = const1;
8039
8040 else if (op0 == op1)
8041 {
8042 switch (op0)
8043 {
8044 case AND:
8045 const0 &= const1;
8046 break;
8047 case IOR:
8048 const0 |= const1;
8049 break;
8050 case XOR:
8051 const0 ^= const1;
8052 break;
8053 case PLUS:
8054 const0 += const1;
8055 break;
8056 case NEG:
8057 op0 = NIL;
8058 break;
8059 default:
8060 break;
8061 }
8062 }
8063
8064 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8065 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8066 return 0;
8067
8068 /* If the two constants aren't the same, we can't do anything. The
8069 remaining six cases can all be done. */
8070 else if (const0 != const1)
8071 return 0;
8072
8073 else
8074 switch (op0)
8075 {
8076 case IOR:
8077 if (op1 == AND)
8078 /* (a & b) | b == b */
8079 op0 = SET;
8080 else /* op1 == XOR */
8081 /* (a ^ b) | b == a | b */
8082 ;
8083 break;
8084
8085 case XOR:
8086 if (op1 == AND)
8087 /* (a & b) ^ b == (~a) & b */
8088 op0 = AND, *pcomp_p = 1;
8089 else /* op1 == IOR */
8090 /* (a | b) ^ b == a & ~b */
8091 op0 = AND, *pconst0 = ~ const0;
8092 break;
8093
8094 case AND:
8095 if (op1 == IOR)
8096 /* (a | b) & b == b */
8097 op0 = SET;
8098 else /* op1 == XOR */
8099 /* (a ^ b) & b) == (~a) & b */
8100 *pcomp_p = 1;
8101 break;
8102 default:
8103 break;
8104 }
8105
8106 /* Check for NO-OP cases. */
8107 const0 &= GET_MODE_MASK (mode);
8108 if (const0 == 0
8109 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8110 op0 = NIL;
8111 else if (const0 == 0 && op0 == AND)
8112 op0 = SET;
8113 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
8114 op0 = NIL;
8115
8116 /* If this would be an entire word for the target, but is not for
8117 the host, then sign-extend on the host so that the number will look
8118 the same way on the host that it would on the target.
8119
8120 For example, when building a 64 bit alpha hosted 32 bit sparc
8121 targeted compiler, then we want the 32 bit unsigned value -1 to be
8122 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8123 The later confuses the sparc backend. */
8124
8125 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8126 && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
8127 const0 |= ((HOST_WIDE_INT) (-1) << width);
8128
8129 *pop0 = op0;
8130 *pconst0 = const0;
8131
8132 return 1;
8133 }
8134 \f
8135 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8136 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8137 that we started with.
8138
8139 The shift is normally computed in the widest mode we find in VAROP, as
8140 long as it isn't a different number of words than RESULT_MODE. Exceptions
8141 are ASHIFTRT and ROTATE, which are always done in their original mode, */
8142
8143 static rtx
8144 simplify_shift_const (x, code, result_mode, varop, count)
8145 rtx x;
8146 enum rtx_code code;
8147 enum machine_mode result_mode;
8148 rtx varop;
8149 int count;
8150 {
8151 enum rtx_code orig_code = code;
8152 int orig_count = count;
8153 enum machine_mode mode = result_mode;
8154 enum machine_mode shift_mode, tmode;
8155 int mode_words
8156 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8157 /* We form (outer_op (code varop count) (outer_const)). */
8158 enum rtx_code outer_op = NIL;
8159 HOST_WIDE_INT outer_const = 0;
8160 rtx const_rtx;
8161 int complement_p = 0;
8162 rtx new;
8163
8164 /* If we were given an invalid count, don't do anything except exactly
8165 what was requested. */
8166
8167 if (count < 0 || count > GET_MODE_BITSIZE (mode))
8168 {
8169 if (x)
8170 return x;
8171
8172 return gen_rtx (code, mode, varop, GEN_INT (count));
8173 }
8174
8175 /* Unless one of the branches of the `if' in this loop does a `continue',
8176 we will `break' the loop after the `if'. */
8177
8178 while (count != 0)
8179 {
8180 /* If we have an operand of (clobber (const_int 0)), just return that
8181 value. */
8182 if (GET_CODE (varop) == CLOBBER)
8183 return varop;
8184
8185 /* If we discovered we had to complement VAROP, leave. Making a NOT
8186 here would cause an infinite loop. */
8187 if (complement_p)
8188 break;
8189
8190 /* Convert ROTATERT to ROTATE. */
8191 if (code == ROTATERT)
8192 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8193
8194 /* We need to determine what mode we will do the shift in. If the
8195 shift is a right shift or a ROTATE, we must always do it in the mode
8196 it was originally done in. Otherwise, we can do it in MODE, the
8197 widest mode encountered. */
8198 shift_mode
8199 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8200 ? result_mode : mode);
8201
8202 /* Handle cases where the count is greater than the size of the mode
8203 minus 1. For ASHIFT, use the size minus one as the count (this can
8204 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8205 take the count modulo the size. For other shifts, the result is
8206 zero.
8207
8208 Since these shifts are being produced by the compiler by combining
8209 multiple operations, each of which are defined, we know what the
8210 result is supposed to be. */
8211
8212 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8213 {
8214 if (code == ASHIFTRT)
8215 count = GET_MODE_BITSIZE (shift_mode) - 1;
8216 else if (code == ROTATE || code == ROTATERT)
8217 count %= GET_MODE_BITSIZE (shift_mode);
8218 else
8219 {
8220 /* We can't simply return zero because there may be an
8221 outer op. */
8222 varop = const0_rtx;
8223 count = 0;
8224 break;
8225 }
8226 }
8227
8228 /* Negative counts are invalid and should not have been made (a
8229 programmer-specified negative count should have been handled
8230 above). */
8231 else if (count < 0)
8232 abort ();
8233
8234 /* An arithmetic right shift of a quantity known to be -1 or 0
8235 is a no-op. */
8236 if (code == ASHIFTRT
8237 && (num_sign_bit_copies (varop, shift_mode)
8238 == GET_MODE_BITSIZE (shift_mode)))
8239 {
8240 count = 0;
8241 break;
8242 }
8243
8244 /* If we are doing an arithmetic right shift and discarding all but
8245 the sign bit copies, this is equivalent to doing a shift by the
8246 bitsize minus one. Convert it into that shift because it will often
8247 allow other simplifications. */
8248
8249 if (code == ASHIFTRT
8250 && (count + num_sign_bit_copies (varop, shift_mode)
8251 >= GET_MODE_BITSIZE (shift_mode)))
8252 count = GET_MODE_BITSIZE (shift_mode) - 1;
8253
8254 /* We simplify the tests below and elsewhere by converting
8255 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8256 `make_compound_operation' will convert it to a ASHIFTRT for
8257 those machines (such as Vax) that don't have a LSHIFTRT. */
8258 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8259 && code == ASHIFTRT
8260 && ((nonzero_bits (varop, shift_mode)
8261 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8262 == 0))
8263 code = LSHIFTRT;
8264
8265 switch (GET_CODE (varop))
8266 {
8267 case SIGN_EXTEND:
8268 case ZERO_EXTEND:
8269 case SIGN_EXTRACT:
8270 case ZERO_EXTRACT:
8271 new = expand_compound_operation (varop);
8272 if (new != varop)
8273 {
8274 varop = new;
8275 continue;
8276 }
8277 break;
8278
8279 case MEM:
8280 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8281 minus the width of a smaller mode, we can do this with a
8282 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8283 if ((code == ASHIFTRT || code == LSHIFTRT)
8284 && ! mode_dependent_address_p (XEXP (varop, 0))
8285 && ! MEM_VOLATILE_P (varop)
8286 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8287 MODE_INT, 1)) != BLKmode)
8288 {
8289 if (BYTES_BIG_ENDIAN)
8290 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
8291 else
8292 new = gen_rtx (MEM, tmode,
8293 plus_constant (XEXP (varop, 0),
8294 count / BITS_PER_UNIT));
8295 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
8296 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
8297 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
8298 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8299 : ZERO_EXTEND, mode, new);
8300 count = 0;
8301 continue;
8302 }
8303 break;
8304
8305 case USE:
8306 /* Similar to the case above, except that we can only do this if
8307 the resulting mode is the same as that of the underlying
8308 MEM and adjust the address depending on the *bits* endianness
8309 because of the way that bit-field extract insns are defined. */
8310 if ((code == ASHIFTRT || code == LSHIFTRT)
8311 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8312 MODE_INT, 1)) != BLKmode
8313 && tmode == GET_MODE (XEXP (varop, 0)))
8314 {
8315 if (BITS_BIG_ENDIAN)
8316 new = XEXP (varop, 0);
8317 else
8318 {
8319 new = copy_rtx (XEXP (varop, 0));
8320 SUBST (XEXP (new, 0),
8321 plus_constant (XEXP (new, 0),
8322 count / BITS_PER_UNIT));
8323 }
8324
8325 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8326 : ZERO_EXTEND, mode, new);
8327 count = 0;
8328 continue;
8329 }
8330 break;
8331
8332 case SUBREG:
8333 /* If VAROP is a SUBREG, strip it as long as the inner operand has
8334 the same number of words as what we've seen so far. Then store
8335 the widest mode in MODE. */
8336 if (subreg_lowpart_p (varop)
8337 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8338 > GET_MODE_SIZE (GET_MODE (varop)))
8339 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8340 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8341 == mode_words))
8342 {
8343 varop = SUBREG_REG (varop);
8344 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8345 mode = GET_MODE (varop);
8346 continue;
8347 }
8348 break;
8349
8350 case MULT:
8351 /* Some machines use MULT instead of ASHIFT because MULT
8352 is cheaper. But it is still better on those machines to
8353 merge two shifts into one. */
8354 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8355 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8356 {
8357 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
8358 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));;
8359 continue;
8360 }
8361 break;
8362
8363 case UDIV:
8364 /* Similar, for when divides are cheaper. */
8365 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8366 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8367 {
8368 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
8369 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
8370 continue;
8371 }
8372 break;
8373
8374 case ASHIFTRT:
8375 /* If we are extracting just the sign bit of an arithmetic right
8376 shift, that shift is not needed. */
8377 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
8378 {
8379 varop = XEXP (varop, 0);
8380 continue;
8381 }
8382
8383 /* ... fall through ... */
8384
8385 case LSHIFTRT:
8386 case ASHIFT:
8387 case ROTATE:
8388 /* Here we have two nested shifts. The result is usually the
8389 AND of a new shift with a mask. We compute the result below. */
8390 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8391 && INTVAL (XEXP (varop, 1)) >= 0
8392 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
8393 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8394 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8395 {
8396 enum rtx_code first_code = GET_CODE (varop);
8397 int first_count = INTVAL (XEXP (varop, 1));
8398 unsigned HOST_WIDE_INT mask;
8399 rtx mask_rtx;
8400
8401 /* We have one common special case. We can't do any merging if
8402 the inner code is an ASHIFTRT of a smaller mode. However, if
8403 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8404 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8405 we can convert it to
8406 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
8407 This simplifies certain SIGN_EXTEND operations. */
8408 if (code == ASHIFT && first_code == ASHIFTRT
8409 && (GET_MODE_BITSIZE (result_mode)
8410 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8411 {
8412 /* C3 has the low-order C1 bits zero. */
8413
8414 mask = (GET_MODE_MASK (mode)
8415 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
8416
8417 varop = simplify_and_const_int (NULL_RTX, result_mode,
8418 XEXP (varop, 0), mask);
8419 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
8420 varop, count);
8421 count = first_count;
8422 code = ASHIFTRT;
8423 continue;
8424 }
8425
8426 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8427 than C1 high-order bits equal to the sign bit, we can convert
8428 this to either an ASHIFT or a ASHIFTRT depending on the
8429 two counts.
8430
8431 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8432
8433 if (code == ASHIFTRT && first_code == ASHIFT
8434 && GET_MODE (varop) == shift_mode
8435 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8436 > first_count))
8437 {
8438 count -= first_count;
8439 if (count < 0)
8440 count = - count, code = ASHIFT;
8441 varop = XEXP (varop, 0);
8442 continue;
8443 }
8444
8445 /* There are some cases we can't do. If CODE is ASHIFTRT,
8446 we can only do this if FIRST_CODE is also ASHIFTRT.
8447
8448 We can't do the case when CODE is ROTATE and FIRST_CODE is
8449 ASHIFTRT.
8450
8451 If the mode of this shift is not the mode of the outer shift,
8452 we can't do this if either shift is a right shift or ROTATE.
8453
8454 Finally, we can't do any of these if the mode is too wide
8455 unless the codes are the same.
8456
8457 Handle the case where the shift codes are the same
8458 first. */
8459
8460 if (code == first_code)
8461 {
8462 if (GET_MODE (varop) != result_mode
8463 && (code == ASHIFTRT || code == LSHIFTRT
8464 || code == ROTATE))
8465 break;
8466
8467 count += first_count;
8468 varop = XEXP (varop, 0);
8469 continue;
8470 }
8471
8472 if (code == ASHIFTRT
8473 || (code == ROTATE && first_code == ASHIFTRT)
8474 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
8475 || (GET_MODE (varop) != result_mode
8476 && (first_code == ASHIFTRT || first_code == LSHIFTRT
8477 || first_code == ROTATE
8478 || code == ROTATE)))
8479 break;
8480
8481 /* To compute the mask to apply after the shift, shift the
8482 nonzero bits of the inner shift the same way the
8483 outer shift will. */
8484
8485 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
8486
8487 mask_rtx
8488 = simplify_binary_operation (code, result_mode, mask_rtx,
8489 GEN_INT (count));
8490
8491 /* Give up if we can't compute an outer operation to use. */
8492 if (mask_rtx == 0
8493 || GET_CODE (mask_rtx) != CONST_INT
8494 || ! merge_outer_ops (&outer_op, &outer_const, AND,
8495 INTVAL (mask_rtx),
8496 result_mode, &complement_p))
8497 break;
8498
8499 /* If the shifts are in the same direction, we add the
8500 counts. Otherwise, we subtract them. */
8501 if ((code == ASHIFTRT || code == LSHIFTRT)
8502 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8503 count += first_count;
8504 else
8505 count -= first_count;
8506
8507 /* If COUNT is positive, the new shift is usually CODE,
8508 except for the two exceptions below, in which case it is
8509 FIRST_CODE. If the count is negative, FIRST_CODE should
8510 always be used */
8511 if (count > 0
8512 && ((first_code == ROTATE && code == ASHIFT)
8513 || (first_code == ASHIFTRT && code == LSHIFTRT)))
8514 code = first_code;
8515 else if (count < 0)
8516 code = first_code, count = - count;
8517
8518 varop = XEXP (varop, 0);
8519 continue;
8520 }
8521
8522 /* If we have (A << B << C) for any shift, we can convert this to
8523 (A << C << B). This wins if A is a constant. Only try this if
8524 B is not a constant. */
8525
8526 else if (GET_CODE (varop) == code
8527 && GET_CODE (XEXP (varop, 1)) != CONST_INT
8528 && 0 != (new
8529 = simplify_binary_operation (code, mode,
8530 XEXP (varop, 0),
8531 GEN_INT (count))))
8532 {
8533 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8534 count = 0;
8535 continue;
8536 }
8537 break;
8538
8539 case NOT:
8540 /* Make this fit the case below. */
8541 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
8542 GEN_INT (GET_MODE_MASK (mode)));
8543 continue;
8544
8545 case IOR:
8546 case AND:
8547 case XOR:
8548 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8549 with C the size of VAROP - 1 and the shift is logical if
8550 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8551 we have an (le X 0) operation. If we have an arithmetic shift
8552 and STORE_FLAG_VALUE is 1 or we have a logical shift with
8553 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
8554
8555 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8556 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8557 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8558 && (code == LSHIFTRT || code == ASHIFTRT)
8559 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8560 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8561 {
8562 count = 0;
8563 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8564 const0_rtx);
8565
8566 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8567 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8568
8569 continue;
8570 }
8571
8572 /* If we have (shift (logical)), move the logical to the outside
8573 to allow it to possibly combine with another logical and the
8574 shift to combine with another shift. This also canonicalizes to
8575 what a ZERO_EXTRACT looks like. Also, some machines have
8576 (and (shift)) insns. */
8577
8578 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8579 && (new = simplify_binary_operation (code, result_mode,
8580 XEXP (varop, 1),
8581 GEN_INT (count))) != 0
8582 && GET_CODE(new) == CONST_INT
8583 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8584 INTVAL (new), result_mode, &complement_p))
8585 {
8586 varop = XEXP (varop, 0);
8587 continue;
8588 }
8589
8590 /* If we can't do that, try to simplify the shift in each arm of the
8591 logical expression, make a new logical expression, and apply
8592 the inverse distributive law. */
8593 {
8594 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8595 XEXP (varop, 0), count);
8596 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8597 XEXP (varop, 1), count);
8598
8599 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
8600 varop = apply_distributive_law (varop);
8601
8602 count = 0;
8603 }
8604 break;
8605
8606 case EQ:
8607 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
8608 says that the sign bit can be tested, FOO has mode MODE, C is
8609 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8610 that may be nonzero. */
8611 if (code == LSHIFTRT
8612 && XEXP (varop, 1) == const0_rtx
8613 && GET_MODE (XEXP (varop, 0)) == result_mode
8614 && count == GET_MODE_BITSIZE (result_mode) - 1
8615 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8616 && ((STORE_FLAG_VALUE
8617 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
8618 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8619 && merge_outer_ops (&outer_op, &outer_const, XOR,
8620 (HOST_WIDE_INT) 1, result_mode,
8621 &complement_p))
8622 {
8623 varop = XEXP (varop, 0);
8624 count = 0;
8625 continue;
8626 }
8627 break;
8628
8629 case NEG:
8630 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8631 than the number of bits in the mode is equivalent to A. */
8632 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8633 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
8634 {
8635 varop = XEXP (varop, 0);
8636 count = 0;
8637 continue;
8638 }
8639
8640 /* NEG commutes with ASHIFT since it is multiplication. Move the
8641 NEG outside to allow shifts to combine. */
8642 if (code == ASHIFT
8643 && merge_outer_ops (&outer_op, &outer_const, NEG,
8644 (HOST_WIDE_INT) 0, result_mode,
8645 &complement_p))
8646 {
8647 varop = XEXP (varop, 0);
8648 continue;
8649 }
8650 break;
8651
8652 case PLUS:
8653 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8654 is one less than the number of bits in the mode is
8655 equivalent to (xor A 1). */
8656 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8657 && XEXP (varop, 1) == constm1_rtx
8658 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8659 && merge_outer_ops (&outer_op, &outer_const, XOR,
8660 (HOST_WIDE_INT) 1, result_mode,
8661 &complement_p))
8662 {
8663 count = 0;
8664 varop = XEXP (varop, 0);
8665 continue;
8666 }
8667
8668 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
8669 that might be nonzero in BAR are those being shifted out and those
8670 bits are known zero in FOO, we can replace the PLUS with FOO.
8671 Similarly in the other operand order. This code occurs when
8672 we are computing the size of a variable-size array. */
8673
8674 if ((code == ASHIFTRT || code == LSHIFTRT)
8675 && count < HOST_BITS_PER_WIDE_INT
8676 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8677 && (nonzero_bits (XEXP (varop, 1), result_mode)
8678 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
8679 {
8680 varop = XEXP (varop, 0);
8681 continue;
8682 }
8683 else if ((code == ASHIFTRT || code == LSHIFTRT)
8684 && count < HOST_BITS_PER_WIDE_INT
8685 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8686 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8687 >> count)
8688 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8689 & nonzero_bits (XEXP (varop, 1),
8690 result_mode)))
8691 {
8692 varop = XEXP (varop, 1);
8693 continue;
8694 }
8695
8696 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8697 if (code == ASHIFT
8698 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8699 && (new = simplify_binary_operation (ASHIFT, result_mode,
8700 XEXP (varop, 1),
8701 GEN_INT (count))) != 0
8702 && GET_CODE(new) == CONST_INT
8703 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8704 INTVAL (new), result_mode, &complement_p))
8705 {
8706 varop = XEXP (varop, 0);
8707 continue;
8708 }
8709 break;
8710
8711 case MINUS:
8712 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
8713 with C the size of VAROP - 1 and the shift is logical if
8714 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8715 we have a (gt X 0) operation. If the shift is arithmetic with
8716 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8717 we have a (neg (gt X 0)) operation. */
8718
8719 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8720 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8721 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8722 && (code == LSHIFTRT || code == ASHIFTRT)
8723 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8724 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8725 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8726 {
8727 count = 0;
8728 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8729 const0_rtx);
8730
8731 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8732 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8733
8734 continue;
8735 }
8736 break;
8737
8738 default:
8739 break;
8740 }
8741
8742 break;
8743 }
8744
8745 /* We need to determine what mode to do the shift in. If the shift is
8746 a right shift or ROTATE, we must always do it in the mode it was
8747 originally done in. Otherwise, we can do it in MODE, the widest mode
8748 encountered. The code we care about is that of the shift that will
8749 actually be done, not the shift that was originally requested. */
8750 shift_mode
8751 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8752 ? result_mode : mode);
8753
8754 /* We have now finished analyzing the shift. The result should be
8755 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8756 OUTER_OP is non-NIL, it is an operation that needs to be applied
8757 to the result of the shift. OUTER_CONST is the relevant constant,
8758 but we must turn off all bits turned off in the shift.
8759
8760 If we were passed a value for X, see if we can use any pieces of
8761 it. If not, make new rtx. */
8762
8763 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8764 && GET_CODE (XEXP (x, 1)) == CONST_INT
8765 && INTVAL (XEXP (x, 1)) == count)
8766 const_rtx = XEXP (x, 1);
8767 else
8768 const_rtx = GEN_INT (count);
8769
8770 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8771 && GET_MODE (XEXP (x, 0)) == shift_mode
8772 && SUBREG_REG (XEXP (x, 0)) == varop)
8773 varop = XEXP (x, 0);
8774 else if (GET_MODE (varop) != shift_mode)
8775 varop = gen_lowpart_for_combine (shift_mode, varop);
8776
8777 /* If we can't make the SUBREG, try to return what we were given. */
8778 if (GET_CODE (varop) == CLOBBER)
8779 return x ? x : varop;
8780
8781 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8782 if (new != 0)
8783 x = new;
8784 else
8785 {
8786 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8787 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8788
8789 SUBST (XEXP (x, 0), varop);
8790 SUBST (XEXP (x, 1), const_rtx);
8791 }
8792
8793 /* If we have an outer operation and we just made a shift, it is
8794 possible that we could have simplified the shift were it not
8795 for the outer operation. So try to do the simplification
8796 recursively. */
8797
8798 if (outer_op != NIL && GET_CODE (x) == code
8799 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8800 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8801 INTVAL (XEXP (x, 1)));
8802
8803 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
8804 turn off all the bits that the shift would have turned off. */
8805 if (orig_code == LSHIFTRT && result_mode != shift_mode)
8806 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
8807 GET_MODE_MASK (result_mode) >> orig_count);
8808
8809 /* Do the remainder of the processing in RESULT_MODE. */
8810 x = gen_lowpart_for_combine (result_mode, x);
8811
8812 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
8813 operation. */
8814 if (complement_p)
8815 x = gen_unary (NOT, result_mode, result_mode, x);
8816
8817 if (outer_op != NIL)
8818 {
8819 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
8820 {
8821 int width = GET_MODE_BITSIZE (result_mode);
8822
8823 outer_const &= GET_MODE_MASK (result_mode);
8824
8825 /* If this would be an entire word for the target, but is not for
8826 the host, then sign-extend on the host so that the number will
8827 look the same way on the host that it would on the target.
8828
8829 For example, when building a 64 bit alpha hosted 32 bit sparc
8830 targeted compiler, then we want the 32 bit unsigned value -1 to be
8831 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8832 The later confuses the sparc backend. */
8833
8834 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8835 && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
8836 outer_const |= ((HOST_WIDE_INT) (-1) << width);
8837 }
8838
8839 if (outer_op == AND)
8840 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
8841 else if (outer_op == SET)
8842 /* This means that we have determined that the result is
8843 equivalent to a constant. This should be rare. */
8844 x = GEN_INT (outer_const);
8845 else if (GET_RTX_CLASS (outer_op) == '1')
8846 x = gen_unary (outer_op, result_mode, result_mode, x);
8847 else
8848 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
8849 }
8850
8851 return x;
8852 }
8853 \f
8854 /* Like recog, but we receive the address of a pointer to a new pattern.
8855 We try to match the rtx that the pointer points to.
8856 If that fails, we may try to modify or replace the pattern,
8857 storing the replacement into the same pointer object.
8858
8859 Modifications include deletion or addition of CLOBBERs.
8860
8861 PNOTES is a pointer to a location where any REG_UNUSED notes added for
8862 the CLOBBERs are placed.
8863
8864 PADDED_SCRATCHES is set to the number of (clobber (scratch)) patterns
8865 we had to add.
8866
8867 The value is the final insn code from the pattern ultimately matched,
8868 or -1. */
8869
8870 static int
8871 recog_for_combine (pnewpat, insn, pnotes, padded_scratches)
8872 rtx *pnewpat;
8873 rtx insn;
8874 rtx *pnotes;
8875 int *padded_scratches;
8876 {
8877 register rtx pat = *pnewpat;
8878 int insn_code_number;
8879 int num_clobbers_to_add = 0;
8880 int i;
8881 rtx notes = 0;
8882
8883 *padded_scratches = 0;
8884
8885 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
8886 we use to indicate that something didn't match. If we find such a
8887 thing, force rejection. */
8888 if (GET_CODE (pat) == PARALLEL)
8889 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8890 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
8891 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
8892 return -1;
8893
8894 /* Is the result of combination a valid instruction? */
8895 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8896
8897 /* If it isn't, there is the possibility that we previously had an insn
8898 that clobbered some register as a side effect, but the combined
8899 insn doesn't need to do that. So try once more without the clobbers
8900 unless this represents an ASM insn. */
8901
8902 if (insn_code_number < 0 && ! check_asm_operands (pat)
8903 && GET_CODE (pat) == PARALLEL)
8904 {
8905 int pos;
8906
8907 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
8908 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
8909 {
8910 if (i != pos)
8911 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
8912 pos++;
8913 }
8914
8915 SUBST_INT (XVECLEN (pat, 0), pos);
8916
8917 if (pos == 1)
8918 pat = XVECEXP (pat, 0, 0);
8919
8920 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8921 }
8922
8923 /* If we had any clobbers to add, make a new pattern than contains
8924 them. Then check to make sure that all of them are dead. */
8925 if (num_clobbers_to_add)
8926 {
8927 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8928 gen_rtvec (GET_CODE (pat) == PARALLEL
8929 ? XVECLEN (pat, 0) + num_clobbers_to_add
8930 : num_clobbers_to_add + 1));
8931
8932 if (GET_CODE (pat) == PARALLEL)
8933 for (i = 0; i < XVECLEN (pat, 0); i++)
8934 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8935 else
8936 XVECEXP (newpat, 0, 0) = pat;
8937
8938 add_clobbers (newpat, insn_code_number);
8939
8940 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8941 i < XVECLEN (newpat, 0); i++)
8942 {
8943 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8944 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8945 return -1;
8946 else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
8947 (*padded_scratches)++;
8948 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8949 XEXP (XVECEXP (newpat, 0, i), 0), notes);
8950 }
8951 pat = newpat;
8952 }
8953
8954 *pnewpat = pat;
8955 *pnotes = notes;
8956
8957 return insn_code_number;
8958 }
8959 \f
8960 /* Like gen_lowpart but for use by combine. In combine it is not possible
8961 to create any new pseudoregs. However, it is safe to create
8962 invalid memory addresses, because combine will try to recognize
8963 them and all they will do is make the combine attempt fail.
8964
8965 If for some reason this cannot do its job, an rtx
8966 (clobber (const_int 0)) is returned.
8967 An insn containing that will not be recognized. */
8968
8969 #undef gen_lowpart
8970
8971 static rtx
8972 gen_lowpart_for_combine (mode, x)
8973 enum machine_mode mode;
8974 register rtx x;
8975 {
8976 rtx result;
8977
8978 if (GET_MODE (x) == mode)
8979 return x;
8980
8981 /* We can only support MODE being wider than a word if X is a
8982 constant integer or has a mode the same size. */
8983
8984 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
8985 && ! ((GET_MODE (x) == VOIDmode
8986 && (GET_CODE (x) == CONST_INT
8987 || GET_CODE (x) == CONST_DOUBLE))
8988 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
8989 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8990
8991 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
8992 won't know what to do. So we will strip off the SUBREG here and
8993 process normally. */
8994 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8995 {
8996 x = SUBREG_REG (x);
8997 if (GET_MODE (x) == mode)
8998 return x;
8999 }
9000
9001 result = gen_lowpart_common (mode, x);
9002 if (result != 0
9003 && GET_CODE (result) == SUBREG
9004 && GET_CODE (SUBREG_REG (result)) == REG
9005 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9006 && (GET_MODE_SIZE (GET_MODE (result))
9007 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
9008 REG_CHANGES_SIZE (REGNO (SUBREG_REG (result))) = 1;
9009
9010 if (result)
9011 return result;
9012
9013 if (GET_CODE (x) == MEM)
9014 {
9015 register int offset = 0;
9016 rtx new;
9017
9018 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9019 address. */
9020 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9021 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9022
9023 /* If we want to refer to something bigger than the original memref,
9024 generate a perverse subreg instead. That will force a reload
9025 of the original memref X. */
9026 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
9027 return gen_rtx (SUBREG, mode, x, 0);
9028
9029 if (WORDS_BIG_ENDIAN)
9030 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9031 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
9032 if (BYTES_BIG_ENDIAN)
9033 {
9034 /* Adjust the address so that the address-after-the-data is
9035 unchanged. */
9036 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9037 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9038 }
9039 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
9040 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
9041 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
9042 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
9043 return new;
9044 }
9045
9046 /* If X is a comparison operator, rewrite it in a new mode. This
9047 probably won't match, but may allow further simplifications. */
9048 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9049 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9050
9051 /* If we couldn't simplify X any other way, just enclose it in a
9052 SUBREG. Normally, this SUBREG won't match, but some patterns may
9053 include an explicit SUBREG or we may simplify it further in combine. */
9054 else
9055 {
9056 int word = 0;
9057
9058 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
9059 word = ((GET_MODE_SIZE (GET_MODE (x))
9060 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
9061 / UNITS_PER_WORD);
9062 return gen_rtx (SUBREG, mode, x, word);
9063 }
9064 }
9065 \f
9066 /* Make an rtx expression. This is a subset of gen_rtx and only supports
9067 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
9068
9069 If the identical expression was previously in the insn (in the undobuf),
9070 it will be returned. Only if it is not found will a new expression
9071 be made. */
9072
9073 /*VARARGS2*/
9074 static rtx
9075 gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
9076 {
9077 #ifndef __STDC__
9078 enum rtx_code code;
9079 enum machine_mode mode;
9080 #endif
9081 va_list p;
9082 int n_args;
9083 rtx args[3];
9084 int i, j;
9085 char *fmt;
9086 rtx rt;
9087 struct undo *undo;
9088
9089 VA_START (p, mode);
9090
9091 #ifndef __STDC__
9092 code = va_arg (p, enum rtx_code);
9093 mode = va_arg (p, enum machine_mode);
9094 #endif
9095
9096 n_args = GET_RTX_LENGTH (code);
9097 fmt = GET_RTX_FORMAT (code);
9098
9099 if (n_args == 0 || n_args > 3)
9100 abort ();
9101
9102 /* Get each arg and verify that it is supposed to be an expression. */
9103 for (j = 0; j < n_args; j++)
9104 {
9105 if (*fmt++ != 'e')
9106 abort ();
9107
9108 args[j] = va_arg (p, rtx);
9109 }
9110
9111 /* See if this is in undobuf. Be sure we don't use objects that came
9112 from another insn; this could produce circular rtl structures. */
9113
9114 for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9115 if (!undo->is_int
9116 && GET_CODE (undo->old_contents.r) == code
9117 && GET_MODE (undo->old_contents.r) == mode)
9118 {
9119 for (j = 0; j < n_args; j++)
9120 if (XEXP (undo->old_contents.r, j) != args[j])
9121 break;
9122
9123 if (j == n_args)
9124 return undo->old_contents.r;
9125 }
9126
9127 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
9128 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
9129 rt = rtx_alloc (code);
9130 PUT_MODE (rt, mode);
9131 XEXP (rt, 0) = args[0];
9132 if (n_args > 1)
9133 {
9134 XEXP (rt, 1) = args[1];
9135 if (n_args > 2)
9136 XEXP (rt, 2) = args[2];
9137 }
9138 return rt;
9139 }
9140
9141 /* These routines make binary and unary operations by first seeing if they
9142 fold; if not, a new expression is allocated. */
9143
9144 static rtx
9145 gen_binary (code, mode, op0, op1)
9146 enum rtx_code code;
9147 enum machine_mode mode;
9148 rtx op0, op1;
9149 {
9150 rtx result;
9151 rtx tem;
9152
9153 if (GET_RTX_CLASS (code) == 'c'
9154 && (GET_CODE (op0) == CONST_INT
9155 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
9156 tem = op0, op0 = op1, op1 = tem;
9157
9158 if (GET_RTX_CLASS (code) == '<')
9159 {
9160 enum machine_mode op_mode = GET_MODE (op0);
9161
9162 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
9163 just (REL_OP X Y). */
9164 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9165 {
9166 op1 = XEXP (op0, 1);
9167 op0 = XEXP (op0, 0);
9168 op_mode = GET_MODE (op0);
9169 }
9170
9171 if (op_mode == VOIDmode)
9172 op_mode = GET_MODE (op1);
9173 result = simplify_relational_operation (code, op_mode, op0, op1);
9174 }
9175 else
9176 result = simplify_binary_operation (code, mode, op0, op1);
9177
9178 if (result)
9179 return result;
9180
9181 /* Put complex operands first and constants second. */
9182 if (GET_RTX_CLASS (code) == 'c'
9183 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9184 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9185 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9186 || (GET_CODE (op0) == SUBREG
9187 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9188 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9189 return gen_rtx_combine (code, mode, op1, op0);
9190
9191 return gen_rtx_combine (code, mode, op0, op1);
9192 }
9193
9194 static rtx
9195 gen_unary (code, mode, op0_mode, op0)
9196 enum rtx_code code;
9197 enum machine_mode mode, op0_mode;
9198 rtx op0;
9199 {
9200 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
9201
9202 if (result)
9203 return result;
9204
9205 return gen_rtx_combine (code, mode, op0);
9206 }
9207 \f
9208 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
9209 comparison code that will be tested.
9210
9211 The result is a possibly different comparison code to use. *POP0 and
9212 *POP1 may be updated.
9213
9214 It is possible that we might detect that a comparison is either always
9215 true or always false. However, we do not perform general constant
9216 folding in combine, so this knowledge isn't useful. Such tautologies
9217 should have been detected earlier. Hence we ignore all such cases. */
9218
9219 static enum rtx_code
9220 simplify_comparison (code, pop0, pop1)
9221 enum rtx_code code;
9222 rtx *pop0;
9223 rtx *pop1;
9224 {
9225 rtx op0 = *pop0;
9226 rtx op1 = *pop1;
9227 rtx tem, tem1;
9228 int i;
9229 enum machine_mode mode, tmode;
9230
9231 /* Try a few ways of applying the same transformation to both operands. */
9232 while (1)
9233 {
9234 #ifndef WORD_REGISTER_OPERATIONS
9235 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9236 so check specially. */
9237 if (code != GTU && code != GEU && code != LTU && code != LEU
9238 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9239 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9240 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9241 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9242 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9243 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
9244 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
9245 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9246 && GET_CODE (XEXP (op1, 1)) == CONST_INT
9247 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9248 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9249 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9250 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9251 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9252 && (INTVAL (XEXP (op0, 1))
9253 == (GET_MODE_BITSIZE (GET_MODE (op0))
9254 - (GET_MODE_BITSIZE
9255 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9256 {
9257 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9258 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9259 }
9260 #endif
9261
9262 /* If both operands are the same constant shift, see if we can ignore the
9263 shift. We can if the shift is a rotate or if the bits shifted out of
9264 this shift are known to be zero for both inputs and if the type of
9265 comparison is compatible with the shift. */
9266 if (GET_CODE (op0) == GET_CODE (op1)
9267 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9268 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
9269 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
9270 && (code != GT && code != LT && code != GE && code != LE))
9271 || (GET_CODE (op0) == ASHIFTRT
9272 && (code != GTU && code != LTU
9273 && code != GEU && code != GEU)))
9274 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9275 && INTVAL (XEXP (op0, 1)) >= 0
9276 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9277 && XEXP (op0, 1) == XEXP (op1, 1))
9278 {
9279 enum machine_mode mode = GET_MODE (op0);
9280 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9281 int shift_count = INTVAL (XEXP (op0, 1));
9282
9283 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9284 mask &= (mask >> shift_count) << shift_count;
9285 else if (GET_CODE (op0) == ASHIFT)
9286 mask = (mask & (mask << shift_count)) >> shift_count;
9287
9288 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
9289 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
9290 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9291 else
9292 break;
9293 }
9294
9295 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9296 SUBREGs are of the same mode, and, in both cases, the AND would
9297 be redundant if the comparison was done in the narrower mode,
9298 do the comparison in the narrower mode (e.g., we are AND'ing with 1
9299 and the operand's possibly nonzero bits are 0xffffff01; in that case
9300 if we only care about QImode, we don't need the AND). This case
9301 occurs if the output mode of an scc insn is not SImode and
9302 STORE_FLAG_VALUE == 1 (e.g., the 386).
9303
9304 Similarly, check for a case where the AND's are ZERO_EXTEND
9305 operations from some narrower mode even though a SUBREG is not
9306 present. */
9307
9308 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9309 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9310 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
9311 {
9312 rtx inner_op0 = XEXP (op0, 0);
9313 rtx inner_op1 = XEXP (op1, 0);
9314 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9315 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9316 int changed = 0;
9317
9318 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9319 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9320 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9321 && (GET_MODE (SUBREG_REG (inner_op0))
9322 == GET_MODE (SUBREG_REG (inner_op1)))
9323 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9324 <= HOST_BITS_PER_WIDE_INT)
9325 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
9326 GET_MODE (SUBREG_REG (op0)))))
9327 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9328 GET_MODE (SUBREG_REG (inner_op1))))))
9329 {
9330 op0 = SUBREG_REG (inner_op0);
9331 op1 = SUBREG_REG (inner_op1);
9332
9333 /* The resulting comparison is always unsigned since we masked
9334 off the original sign bit. */
9335 code = unsigned_condition (code);
9336
9337 changed = 1;
9338 }
9339
9340 else if (c0 == c1)
9341 for (tmode = GET_CLASS_NARROWEST_MODE
9342 (GET_MODE_CLASS (GET_MODE (op0)));
9343 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
9344 if (c0 == GET_MODE_MASK (tmode))
9345 {
9346 op0 = gen_lowpart_for_combine (tmode, inner_op0);
9347 op1 = gen_lowpart_for_combine (tmode, inner_op1);
9348 code = unsigned_condition (code);
9349 changed = 1;
9350 break;
9351 }
9352
9353 if (! changed)
9354 break;
9355 }
9356
9357 /* If both operands are NOT, we can strip off the outer operation
9358 and adjust the comparison code for swapped operands; similarly for
9359 NEG, except that this must be an equality comparison. */
9360 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9361 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9362 && (code == EQ || code == NE)))
9363 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
9364
9365 else
9366 break;
9367 }
9368
9369 /* If the first operand is a constant, swap the operands and adjust the
9370 comparison code appropriately, but don't do this if the second operand
9371 is already a constant integer. */
9372 if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9373 {
9374 tem = op0, op0 = op1, op1 = tem;
9375 code = swap_condition (code);
9376 }
9377
9378 /* We now enter a loop during which we will try to simplify the comparison.
9379 For the most part, we only are concerned with comparisons with zero,
9380 but some things may really be comparisons with zero but not start
9381 out looking that way. */
9382
9383 while (GET_CODE (op1) == CONST_INT)
9384 {
9385 enum machine_mode mode = GET_MODE (op0);
9386 int mode_width = GET_MODE_BITSIZE (mode);
9387 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9388 int equality_comparison_p;
9389 int sign_bit_comparison_p;
9390 int unsigned_comparison_p;
9391 HOST_WIDE_INT const_op;
9392
9393 /* We only want to handle integral modes. This catches VOIDmode,
9394 CCmode, and the floating-point modes. An exception is that we
9395 can handle VOIDmode if OP0 is a COMPARE or a comparison
9396 operation. */
9397
9398 if (GET_MODE_CLASS (mode) != MODE_INT
9399 && ! (mode == VOIDmode
9400 && (GET_CODE (op0) == COMPARE
9401 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
9402 break;
9403
9404 /* Get the constant we are comparing against and turn off all bits
9405 not on in our mode. */
9406 const_op = INTVAL (op1);
9407 if (mode_width <= HOST_BITS_PER_WIDE_INT)
9408 const_op &= mask;
9409
9410 /* If we are comparing against a constant power of two and the value
9411 being compared can only have that single bit nonzero (e.g., it was
9412 `and'ed with that bit), we can replace this with a comparison
9413 with zero. */
9414 if (const_op
9415 && (code == EQ || code == NE || code == GE || code == GEU
9416 || code == LT || code == LTU)
9417 && mode_width <= HOST_BITS_PER_WIDE_INT
9418 && exact_log2 (const_op) >= 0
9419 && nonzero_bits (op0, mode) == const_op)
9420 {
9421 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9422 op1 = const0_rtx, const_op = 0;
9423 }
9424
9425 /* Similarly, if we are comparing a value known to be either -1 or
9426 0 with -1, change it to the opposite comparison against zero. */
9427
9428 if (const_op == -1
9429 && (code == EQ || code == NE || code == GT || code == LE
9430 || code == GEU || code == LTU)
9431 && num_sign_bit_copies (op0, mode) == mode_width)
9432 {
9433 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9434 op1 = const0_rtx, const_op = 0;
9435 }
9436
9437 /* Do some canonicalizations based on the comparison code. We prefer
9438 comparisons against zero and then prefer equality comparisons.
9439 If we can reduce the size of a constant, we will do that too. */
9440
9441 switch (code)
9442 {
9443 case LT:
9444 /* < C is equivalent to <= (C - 1) */
9445 if (const_op > 0)
9446 {
9447 const_op -= 1;
9448 op1 = GEN_INT (const_op);
9449 code = LE;
9450 /* ... fall through to LE case below. */
9451 }
9452 else
9453 break;
9454
9455 case LE:
9456 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
9457 if (const_op < 0)
9458 {
9459 const_op += 1;
9460 op1 = GEN_INT (const_op);
9461 code = LT;
9462 }
9463
9464 /* If we are doing a <= 0 comparison on a value known to have
9465 a zero sign bit, we can replace this with == 0. */
9466 else if (const_op == 0
9467 && mode_width <= HOST_BITS_PER_WIDE_INT
9468 && (nonzero_bits (op0, mode)
9469 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9470 code = EQ;
9471 break;
9472
9473 case GE:
9474 /* >= C is equivalent to > (C - 1). */
9475 if (const_op > 0)
9476 {
9477 const_op -= 1;
9478 op1 = GEN_INT (const_op);
9479 code = GT;
9480 /* ... fall through to GT below. */
9481 }
9482 else
9483 break;
9484
9485 case GT:
9486 /* > C is equivalent to >= (C + 1); we do this for C < 0*/
9487 if (const_op < 0)
9488 {
9489 const_op += 1;
9490 op1 = GEN_INT (const_op);
9491 code = GE;
9492 }
9493
9494 /* If we are doing a > 0 comparison on a value known to have
9495 a zero sign bit, we can replace this with != 0. */
9496 else if (const_op == 0
9497 && mode_width <= HOST_BITS_PER_WIDE_INT
9498 && (nonzero_bits (op0, mode)
9499 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9500 code = NE;
9501 break;
9502
9503 case LTU:
9504 /* < C is equivalent to <= (C - 1). */
9505 if (const_op > 0)
9506 {
9507 const_op -= 1;
9508 op1 = GEN_INT (const_op);
9509 code = LEU;
9510 /* ... fall through ... */
9511 }
9512
9513 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
9514 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9515 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9516 {
9517 const_op = 0, op1 = const0_rtx;
9518 code = GE;
9519 break;
9520 }
9521 else
9522 break;
9523
9524 case LEU:
9525 /* unsigned <= 0 is equivalent to == 0 */
9526 if (const_op == 0)
9527 code = EQ;
9528
9529 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
9530 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9531 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
9532 {
9533 const_op = 0, op1 = const0_rtx;
9534 code = GE;
9535 }
9536 break;
9537
9538 case GEU:
9539 /* >= C is equivalent to < (C - 1). */
9540 if (const_op > 1)
9541 {
9542 const_op -= 1;
9543 op1 = GEN_INT (const_op);
9544 code = GTU;
9545 /* ... fall through ... */
9546 }
9547
9548 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
9549 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9550 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9551 {
9552 const_op = 0, op1 = const0_rtx;
9553 code = LT;
9554 break;
9555 }
9556 else
9557 break;
9558
9559 case GTU:
9560 /* unsigned > 0 is equivalent to != 0 */
9561 if (const_op == 0)
9562 code = NE;
9563
9564 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
9565 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9566 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
9567 {
9568 const_op = 0, op1 = const0_rtx;
9569 code = LT;
9570 }
9571 break;
9572
9573 default:
9574 break;
9575 }
9576
9577 /* Compute some predicates to simplify code below. */
9578
9579 equality_comparison_p = (code == EQ || code == NE);
9580 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9581 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9582 || code == LEU);
9583
9584 /* If this is a sign bit comparison and we can do arithmetic in
9585 MODE, say that we will only be needing the sign bit of OP0. */
9586 if (sign_bit_comparison_p
9587 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9588 op0 = force_to_mode (op0, mode,
9589 ((HOST_WIDE_INT) 1
9590 << (GET_MODE_BITSIZE (mode) - 1)),
9591 NULL_RTX, 0);
9592
9593 /* Now try cases based on the opcode of OP0. If none of the cases
9594 does a "continue", we exit this loop immediately after the
9595 switch. */
9596
9597 switch (GET_CODE (op0))
9598 {
9599 case ZERO_EXTRACT:
9600 /* If we are extracting a single bit from a variable position in
9601 a constant that has only a single bit set and are comparing it
9602 with zero, we can convert this into an equality comparison
9603 between the position and the location of the single bit. */
9604
9605 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
9606 && XEXP (op0, 1) == const1_rtx
9607 && equality_comparison_p && const_op == 0
9608 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
9609 {
9610 if (BITS_BIG_ENDIAN)
9611 #ifdef HAVE_extzv
9612 i = (GET_MODE_BITSIZE
9613 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
9614 #else
9615 i = BITS_PER_WORD - 1 - i;
9616 #endif
9617
9618 op0 = XEXP (op0, 2);
9619 op1 = GEN_INT (i);
9620 const_op = i;
9621
9622 /* Result is nonzero iff shift count is equal to I. */
9623 code = reverse_condition (code);
9624 continue;
9625 }
9626
9627 /* ... fall through ... */
9628
9629 case SIGN_EXTRACT:
9630 tem = expand_compound_operation (op0);
9631 if (tem != op0)
9632 {
9633 op0 = tem;
9634 continue;
9635 }
9636 break;
9637
9638 case NOT:
9639 /* If testing for equality, we can take the NOT of the constant. */
9640 if (equality_comparison_p
9641 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9642 {
9643 op0 = XEXP (op0, 0);
9644 op1 = tem;
9645 continue;
9646 }
9647
9648 /* If just looking at the sign bit, reverse the sense of the
9649 comparison. */
9650 if (sign_bit_comparison_p)
9651 {
9652 op0 = XEXP (op0, 0);
9653 code = (code == GE ? LT : GE);
9654 continue;
9655 }
9656 break;
9657
9658 case NEG:
9659 /* If testing for equality, we can take the NEG of the constant. */
9660 if (equality_comparison_p
9661 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9662 {
9663 op0 = XEXP (op0, 0);
9664 op1 = tem;
9665 continue;
9666 }
9667
9668 /* The remaining cases only apply to comparisons with zero. */
9669 if (const_op != 0)
9670 break;
9671
9672 /* When X is ABS or is known positive,
9673 (neg X) is < 0 if and only if X != 0. */
9674
9675 if (sign_bit_comparison_p
9676 && (GET_CODE (XEXP (op0, 0)) == ABS
9677 || (mode_width <= HOST_BITS_PER_WIDE_INT
9678 && (nonzero_bits (XEXP (op0, 0), mode)
9679 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
9680 {
9681 op0 = XEXP (op0, 0);
9682 code = (code == LT ? NE : EQ);
9683 continue;
9684 }
9685
9686 /* If we have NEG of something whose two high-order bits are the
9687 same, we know that "(-a) < 0" is equivalent to "a > 0". */
9688 if (num_sign_bit_copies (op0, mode) >= 2)
9689 {
9690 op0 = XEXP (op0, 0);
9691 code = swap_condition (code);
9692 continue;
9693 }
9694 break;
9695
9696 case ROTATE:
9697 /* If we are testing equality and our count is a constant, we
9698 can perform the inverse operation on our RHS. */
9699 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9700 && (tem = simplify_binary_operation (ROTATERT, mode,
9701 op1, XEXP (op0, 1))) != 0)
9702 {
9703 op0 = XEXP (op0, 0);
9704 op1 = tem;
9705 continue;
9706 }
9707
9708 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9709 a particular bit. Convert it to an AND of a constant of that
9710 bit. This will be converted into a ZERO_EXTRACT. */
9711 if (const_op == 0 && sign_bit_comparison_p
9712 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9713 && mode_width <= HOST_BITS_PER_WIDE_INT)
9714 {
9715 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9716 ((HOST_WIDE_INT) 1
9717 << (mode_width - 1
9718 - INTVAL (XEXP (op0, 1)))));
9719 code = (code == LT ? NE : EQ);
9720 continue;
9721 }
9722
9723 /* ... fall through ... */
9724
9725 case ABS:
9726 /* ABS is ignorable inside an equality comparison with zero. */
9727 if (const_op == 0 && equality_comparison_p)
9728 {
9729 op0 = XEXP (op0, 0);
9730 continue;
9731 }
9732 break;
9733
9734
9735 case SIGN_EXTEND:
9736 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9737 to (compare FOO CONST) if CONST fits in FOO's mode and we
9738 are either testing inequality or have an unsigned comparison
9739 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
9740 if (! unsigned_comparison_p
9741 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9742 <= HOST_BITS_PER_WIDE_INT)
9743 && ((unsigned HOST_WIDE_INT) const_op
9744 < (((HOST_WIDE_INT) 1
9745 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
9746 {
9747 op0 = XEXP (op0, 0);
9748 continue;
9749 }
9750 break;
9751
9752 case SUBREG:
9753 /* Check for the case where we are comparing A - C1 with C2,
9754 both constants are smaller than 1/2 the maximum positive
9755 value in MODE, and the comparison is equality or unsigned.
9756 In that case, if A is either zero-extended to MODE or has
9757 sufficient sign bits so that the high-order bit in MODE
9758 is a copy of the sign in the inner mode, we can prove that it is
9759 safe to do the operation in the wider mode. This simplifies
9760 many range checks. */
9761
9762 if (mode_width <= HOST_BITS_PER_WIDE_INT
9763 && subreg_lowpart_p (op0)
9764 && GET_CODE (SUBREG_REG (op0)) == PLUS
9765 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9766 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9767 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9768 < GET_MODE_MASK (mode) / 2)
9769 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
9770 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9771 GET_MODE (SUBREG_REG (op0)))
9772 & ~ GET_MODE_MASK (mode))
9773 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9774 GET_MODE (SUBREG_REG (op0)))
9775 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9776 - GET_MODE_BITSIZE (mode)))))
9777 {
9778 op0 = SUBREG_REG (op0);
9779 continue;
9780 }
9781
9782 /* If the inner mode is narrower and we are extracting the low part,
9783 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9784 if (subreg_lowpart_p (op0)
9785 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9786 /* Fall through */ ;
9787 else
9788 break;
9789
9790 /* ... fall through ... */
9791
9792 case ZERO_EXTEND:
9793 if ((unsigned_comparison_p || equality_comparison_p)
9794 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9795 <= HOST_BITS_PER_WIDE_INT)
9796 && ((unsigned HOST_WIDE_INT) const_op
9797 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9798 {
9799 op0 = XEXP (op0, 0);
9800 continue;
9801 }
9802 break;
9803
9804 case PLUS:
9805 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
9806 this for equality comparisons due to pathological cases involving
9807 overflows. */
9808 if (equality_comparison_p
9809 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9810 op1, XEXP (op0, 1))))
9811 {
9812 op0 = XEXP (op0, 0);
9813 op1 = tem;
9814 continue;
9815 }
9816
9817 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
9818 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
9819 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
9820 {
9821 op0 = XEXP (XEXP (op0, 0), 0);
9822 code = (code == LT ? EQ : NE);
9823 continue;
9824 }
9825 break;
9826
9827 case MINUS:
9828 /* (eq (minus A B) C) -> (eq A (plus B C)) or
9829 (eq B (minus A C)), whichever simplifies. We can only do
9830 this for equality comparisons due to pathological cases involving
9831 overflows. */
9832 if (equality_comparison_p
9833 && 0 != (tem = simplify_binary_operation (PLUS, mode,
9834 XEXP (op0, 1), op1)))
9835 {
9836 op0 = XEXP (op0, 0);
9837 op1 = tem;
9838 continue;
9839 }
9840
9841 if (equality_comparison_p
9842 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9843 XEXP (op0, 0), op1)))
9844 {
9845 op0 = XEXP (op0, 1);
9846 op1 = tem;
9847 continue;
9848 }
9849
9850 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
9851 of bits in X minus 1, is one iff X > 0. */
9852 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
9853 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9854 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
9855 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9856 {
9857 op0 = XEXP (op0, 1);
9858 code = (code == GE ? LE : GT);
9859 continue;
9860 }
9861 break;
9862
9863 case XOR:
9864 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
9865 if C is zero or B is a constant. */
9866 if (equality_comparison_p
9867 && 0 != (tem = simplify_binary_operation (XOR, mode,
9868 XEXP (op0, 1), op1)))
9869 {
9870 op0 = XEXP (op0, 0);
9871 op1 = tem;
9872 continue;
9873 }
9874 break;
9875
9876 case EQ: case NE:
9877 case LT: case LTU: case LE: case LEU:
9878 case GT: case GTU: case GE: case GEU:
9879 /* We can't do anything if OP0 is a condition code value, rather
9880 than an actual data value. */
9881 if (const_op != 0
9882 #ifdef HAVE_cc0
9883 || XEXP (op0, 0) == cc0_rtx
9884 #endif
9885 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
9886 break;
9887
9888 /* Get the two operands being compared. */
9889 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
9890 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
9891 else
9892 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
9893
9894 /* Check for the cases where we simply want the result of the
9895 earlier test or the opposite of that result. */
9896 if (code == NE
9897 || (code == EQ && reversible_comparison_p (op0))
9898 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9899 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9900 && (STORE_FLAG_VALUE
9901 & (((HOST_WIDE_INT) 1
9902 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
9903 && (code == LT
9904 || (code == GE && reversible_comparison_p (op0)))))
9905 {
9906 code = (code == LT || code == NE
9907 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
9908 op0 = tem, op1 = tem1;
9909 continue;
9910 }
9911 break;
9912
9913 case IOR:
9914 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
9915 iff X <= 0. */
9916 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
9917 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
9918 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9919 {
9920 op0 = XEXP (op0, 1);
9921 code = (code == GE ? GT : LE);
9922 continue;
9923 }
9924 break;
9925
9926 case AND:
9927 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
9928 will be converted to a ZERO_EXTRACT later. */
9929 if (const_op == 0 && equality_comparison_p
9930 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9931 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
9932 {
9933 op0 = simplify_and_const_int
9934 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
9935 XEXP (op0, 1),
9936 XEXP (XEXP (op0, 0), 1)),
9937 (HOST_WIDE_INT) 1);
9938 continue;
9939 }
9940
9941 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
9942 zero and X is a comparison and C1 and C2 describe only bits set
9943 in STORE_FLAG_VALUE, we can compare with X. */
9944 if (const_op == 0 && equality_comparison_p
9945 && mode_width <= HOST_BITS_PER_WIDE_INT
9946 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9947 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
9948 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9949 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
9950 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
9951 {
9952 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9953 << INTVAL (XEXP (XEXP (op0, 0), 1)));
9954 if ((~ STORE_FLAG_VALUE & mask) == 0
9955 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
9956 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
9957 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
9958 {
9959 op0 = XEXP (XEXP (op0, 0), 0);
9960 continue;
9961 }
9962 }
9963
9964 /* If we are doing an equality comparison of an AND of a bit equal
9965 to the sign bit, replace this with a LT or GE comparison of
9966 the underlying value. */
9967 if (equality_comparison_p
9968 && const_op == 0
9969 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9970 && mode_width <= HOST_BITS_PER_WIDE_INT
9971 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9972 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9973 {
9974 op0 = XEXP (op0, 0);
9975 code = (code == EQ ? GE : LT);
9976 continue;
9977 }
9978
9979 /* If this AND operation is really a ZERO_EXTEND from a narrower
9980 mode, the constant fits within that mode, and this is either an
9981 equality or unsigned comparison, try to do this comparison in
9982 the narrower mode. */
9983 if ((equality_comparison_p || unsigned_comparison_p)
9984 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9985 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
9986 & GET_MODE_MASK (mode))
9987 + 1)) >= 0
9988 && const_op >> i == 0
9989 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
9990 {
9991 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
9992 continue;
9993 }
9994 break;
9995
9996 case ASHIFT:
9997 /* If we have (compare (ashift FOO N) (const_int C)) and
9998 the high order N bits of FOO (N+1 if an inequality comparison)
9999 are known to be zero, we can do this by comparing FOO with C
10000 shifted right N bits so long as the low-order N bits of C are
10001 zero. */
10002 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10003 && INTVAL (XEXP (op0, 1)) >= 0
10004 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10005 < HOST_BITS_PER_WIDE_INT)
10006 && ((const_op
10007 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10008 && mode_width <= HOST_BITS_PER_WIDE_INT
10009 && (nonzero_bits (XEXP (op0, 0), mode)
10010 & ~ (mask >> (INTVAL (XEXP (op0, 1))
10011 + ! equality_comparison_p))) == 0)
10012 {
10013 const_op >>= INTVAL (XEXP (op0, 1));
10014 op1 = GEN_INT (const_op);
10015 op0 = XEXP (op0, 0);
10016 continue;
10017 }
10018
10019 /* If we are doing a sign bit comparison, it means we are testing
10020 a particular bit. Convert it to the appropriate AND. */
10021 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10022 && mode_width <= HOST_BITS_PER_WIDE_INT)
10023 {
10024 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10025 ((HOST_WIDE_INT) 1
10026 << (mode_width - 1
10027 - INTVAL (XEXP (op0, 1)))));
10028 code = (code == LT ? NE : EQ);
10029 continue;
10030 }
10031
10032 /* If this an equality comparison with zero and we are shifting
10033 the low bit to the sign bit, we can convert this to an AND of the
10034 low-order bit. */
10035 if (const_op == 0 && equality_comparison_p
10036 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10037 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10038 {
10039 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10040 (HOST_WIDE_INT) 1);
10041 continue;
10042 }
10043 break;
10044
10045 case ASHIFTRT:
10046 /* If this is an equality comparison with zero, we can do this
10047 as a logical shift, which might be much simpler. */
10048 if (equality_comparison_p && const_op == 0
10049 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10050 {
10051 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10052 XEXP (op0, 0),
10053 INTVAL (XEXP (op0, 1)));
10054 continue;
10055 }
10056
10057 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10058 do the comparison in a narrower mode. */
10059 if (! unsigned_comparison_p
10060 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10061 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10062 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10063 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10064 MODE_INT, 1)) != BLKmode
10065 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10066 || ((unsigned HOST_WIDE_INT) - const_op
10067 <= GET_MODE_MASK (tmode))))
10068 {
10069 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10070 continue;
10071 }
10072
10073 /* ... fall through ... */
10074 case LSHIFTRT:
10075 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10076 the low order N bits of FOO are known to be zero, we can do this
10077 by comparing FOO with C shifted left N bits so long as no
10078 overflow occurs. */
10079 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10080 && INTVAL (XEXP (op0, 1)) >= 0
10081 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10082 && mode_width <= HOST_BITS_PER_WIDE_INT
10083 && (nonzero_bits (XEXP (op0, 0), mode)
10084 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10085 && (const_op == 0
10086 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10087 < mode_width)))
10088 {
10089 const_op <<= INTVAL (XEXP (op0, 1));
10090 op1 = GEN_INT (const_op);
10091 op0 = XEXP (op0, 0);
10092 continue;
10093 }
10094
10095 /* If we are using this shift to extract just the sign bit, we
10096 can replace this with an LT or GE comparison. */
10097 if (const_op == 0
10098 && (equality_comparison_p || sign_bit_comparison_p)
10099 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10100 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10101 {
10102 op0 = XEXP (op0, 0);
10103 code = (code == NE || code == GT ? LT : GE);
10104 continue;
10105 }
10106 break;
10107
10108 default:
10109 break;
10110 }
10111
10112 break;
10113 }
10114
10115 /* Now make any compound operations involved in this comparison. Then,
10116 check for an outmost SUBREG on OP0 that isn't doing anything or is
10117 paradoxical. The latter case can only occur when it is known that the
10118 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
10119 We can never remove a SUBREG for a non-equality comparison because the
10120 sign bit is in a different place in the underlying object. */
10121
10122 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10123 op1 = make_compound_operation (op1, SET);
10124
10125 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10126 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10127 && (code == NE || code == EQ)
10128 && ((GET_MODE_SIZE (GET_MODE (op0))
10129 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10130 {
10131 op0 = SUBREG_REG (op0);
10132 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10133 }
10134
10135 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10136 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10137 && (code == NE || code == EQ)
10138 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10139 <= HOST_BITS_PER_WIDE_INT)
10140 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
10141 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
10142 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10143 op1),
10144 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10145 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
10146 op0 = SUBREG_REG (op0), op1 = tem;
10147
10148 /* We now do the opposite procedure: Some machines don't have compare
10149 insns in all modes. If OP0's mode is an integer mode smaller than a
10150 word and we can't do a compare in that mode, see if there is a larger
10151 mode for which we can do the compare. There are a number of cases in
10152 which we can use the wider mode. */
10153
10154 mode = GET_MODE (op0);
10155 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10156 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10157 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10158 for (tmode = GET_MODE_WIDER_MODE (mode);
10159 (tmode != VOIDmode
10160 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
10161 tmode = GET_MODE_WIDER_MODE (tmode))
10162 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
10163 {
10164 /* If the only nonzero bits in OP0 and OP1 are those in the
10165 narrower mode and this is an equality or unsigned comparison,
10166 we can use the wider mode. Similarly for sign-extended
10167 values, in which case it is true for all comparisons. */
10168 if (((code == EQ || code == NE
10169 || code == GEU || code == GTU || code == LEU || code == LTU)
10170 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
10171 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
10172 || ((num_sign_bit_copies (op0, tmode)
10173 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
10174 && (num_sign_bit_copies (op1, tmode)
10175 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
10176 {
10177 op0 = gen_lowpart_for_combine (tmode, op0);
10178 op1 = gen_lowpart_for_combine (tmode, op1);
10179 break;
10180 }
10181
10182 /* If this is a test for negative, we can make an explicit
10183 test of the sign bit. */
10184
10185 if (op1 == const0_rtx && (code == LT || code == GE)
10186 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10187 {
10188 op0 = gen_binary (AND, tmode,
10189 gen_lowpart_for_combine (tmode, op0),
10190 GEN_INT ((HOST_WIDE_INT) 1
10191 << (GET_MODE_BITSIZE (mode) - 1)));
10192 code = (code == LT) ? NE : EQ;
10193 break;
10194 }
10195 }
10196
10197 #ifdef CANONICALIZE_COMPARISON
10198 /* If this machine only supports a subset of valid comparisons, see if we
10199 can convert an unsupported one into a supported one. */
10200 CANONICALIZE_COMPARISON (code, op0, op1);
10201 #endif
10202
10203 *pop0 = op0;
10204 *pop1 = op1;
10205
10206 return code;
10207 }
10208 \f
10209 /* Return 1 if we know that X, a comparison operation, is not operating
10210 on a floating-point value or is EQ or NE, meaning that we can safely
10211 reverse it. */
10212
10213 static int
10214 reversible_comparison_p (x)
10215 rtx x;
10216 {
10217 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
10218 || flag_fast_math
10219 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
10220 return 1;
10221
10222 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
10223 {
10224 case MODE_INT:
10225 case MODE_PARTIAL_INT:
10226 case MODE_COMPLEX_INT:
10227 return 1;
10228
10229 case MODE_CC:
10230 /* If the mode of the condition codes tells us that this is safe,
10231 we need look no further. */
10232 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
10233 return 1;
10234
10235 /* Otherwise try and find where the condition codes were last set and
10236 use that. */
10237 x = get_last_value (XEXP (x, 0));
10238 return (x && GET_CODE (x) == COMPARE
10239 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
10240
10241 default:
10242 return 0;
10243 }
10244 }
10245 \f
10246 /* Utility function for following routine. Called when X is part of a value
10247 being stored into reg_last_set_value. Sets reg_last_set_table_tick
10248 for each register mentioned. Similar to mention_regs in cse.c */
10249
10250 static void
10251 update_table_tick (x)
10252 rtx x;
10253 {
10254 register enum rtx_code code = GET_CODE (x);
10255 register char *fmt = GET_RTX_FORMAT (code);
10256 register int i;
10257
10258 if (code == REG)
10259 {
10260 int regno = REGNO (x);
10261 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10262 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10263
10264 for (i = regno; i < endregno; i++)
10265 reg_last_set_table_tick[i] = label_tick;
10266
10267 return;
10268 }
10269
10270 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10271 /* Note that we can't have an "E" in values stored; see
10272 get_last_value_validate. */
10273 if (fmt[i] == 'e')
10274 update_table_tick (XEXP (x, i));
10275 }
10276
10277 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
10278 are saying that the register is clobbered and we no longer know its
10279 value. If INSN is zero, don't update reg_last_set; this is only permitted
10280 with VALUE also zero and is used to invalidate the register. */
10281
10282 static void
10283 record_value_for_reg (reg, insn, value)
10284 rtx reg;
10285 rtx insn;
10286 rtx value;
10287 {
10288 int regno = REGNO (reg);
10289 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10290 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
10291 int i;
10292
10293 /* If VALUE contains REG and we have a previous value for REG, substitute
10294 the previous value. */
10295 if (value && insn && reg_overlap_mentioned_p (reg, value))
10296 {
10297 rtx tem;
10298
10299 /* Set things up so get_last_value is allowed to see anything set up to
10300 our insn. */
10301 subst_low_cuid = INSN_CUID (insn);
10302 tem = get_last_value (reg);
10303
10304 if (tem)
10305 value = replace_rtx (copy_rtx (value), reg, tem);
10306 }
10307
10308 /* For each register modified, show we don't know its value, that
10309 we don't know about its bitwise content, that its value has been
10310 updated, and that we don't know the location of the death of the
10311 register. */
10312 for (i = regno; i < endregno; i ++)
10313 {
10314 if (insn)
10315 reg_last_set[i] = insn;
10316 reg_last_set_value[i] = 0;
10317 reg_last_set_mode[i] = 0;
10318 reg_last_set_nonzero_bits[i] = 0;
10319 reg_last_set_sign_bit_copies[i] = 0;
10320 reg_last_death[i] = 0;
10321 }
10322
10323 /* Mark registers that are being referenced in this value. */
10324 if (value)
10325 update_table_tick (value);
10326
10327 /* Now update the status of each register being set.
10328 If someone is using this register in this block, set this register
10329 to invalid since we will get confused between the two lives in this
10330 basic block. This makes using this register always invalid. In cse, we
10331 scan the table to invalidate all entries using this register, but this
10332 is too much work for us. */
10333
10334 for (i = regno; i < endregno; i++)
10335 {
10336 reg_last_set_label[i] = label_tick;
10337 if (value && reg_last_set_table_tick[i] == label_tick)
10338 reg_last_set_invalid[i] = 1;
10339 else
10340 reg_last_set_invalid[i] = 0;
10341 }
10342
10343 /* The value being assigned might refer to X (like in "x++;"). In that
10344 case, we must replace it with (clobber (const_int 0)) to prevent
10345 infinite loops. */
10346 if (value && ! get_last_value_validate (&value, insn,
10347 reg_last_set_label[regno], 0))
10348 {
10349 value = copy_rtx (value);
10350 if (! get_last_value_validate (&value, insn,
10351 reg_last_set_label[regno], 1))
10352 value = 0;
10353 }
10354
10355 /* For the main register being modified, update the value, the mode, the
10356 nonzero bits, and the number of sign bit copies. */
10357
10358 reg_last_set_value[regno] = value;
10359
10360 if (value)
10361 {
10362 subst_low_cuid = INSN_CUID (insn);
10363 reg_last_set_mode[regno] = GET_MODE (reg);
10364 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
10365 reg_last_set_sign_bit_copies[regno]
10366 = num_sign_bit_copies (value, GET_MODE (reg));
10367 }
10368 }
10369
10370 /* Used for communication between the following two routines. */
10371 static rtx record_dead_insn;
10372
10373 /* Called via note_stores from record_dead_and_set_regs to handle one
10374 SET or CLOBBER in an insn. */
10375
10376 static void
10377 record_dead_and_set_regs_1 (dest, setter)
10378 rtx dest, setter;
10379 {
10380 if (GET_CODE (dest) == SUBREG)
10381 dest = SUBREG_REG (dest);
10382
10383 if (GET_CODE (dest) == REG)
10384 {
10385 /* If we are setting the whole register, we know its value. Otherwise
10386 show that we don't know the value. We can handle SUBREG in
10387 some cases. */
10388 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
10389 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
10390 else if (GET_CODE (setter) == SET
10391 && GET_CODE (SET_DEST (setter)) == SUBREG
10392 && SUBREG_REG (SET_DEST (setter)) == dest
10393 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
10394 && subreg_lowpart_p (SET_DEST (setter)))
10395 record_value_for_reg (dest, record_dead_insn,
10396 gen_lowpart_for_combine (GET_MODE (dest),
10397 SET_SRC (setter)));
10398 else
10399 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
10400 }
10401 else if (GET_CODE (dest) == MEM
10402 /* Ignore pushes, they clobber nothing. */
10403 && ! push_operand (dest, GET_MODE (dest)))
10404 mem_last_set = INSN_CUID (record_dead_insn);
10405 }
10406
10407 /* Update the records of when each REG was most recently set or killed
10408 for the things done by INSN. This is the last thing done in processing
10409 INSN in the combiner loop.
10410
10411 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
10412 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
10413 and also the similar information mem_last_set (which insn most recently
10414 modified memory) and last_call_cuid (which insn was the most recent
10415 subroutine call). */
10416
10417 static void
10418 record_dead_and_set_regs (insn)
10419 rtx insn;
10420 {
10421 register rtx link;
10422 int i;
10423
10424 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
10425 {
10426 if (REG_NOTE_KIND (link) == REG_DEAD
10427 && GET_CODE (XEXP (link, 0)) == REG)
10428 {
10429 int regno = REGNO (XEXP (link, 0));
10430 int endregno
10431 = regno + (regno < FIRST_PSEUDO_REGISTER
10432 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
10433 : 1);
10434
10435 for (i = regno; i < endregno; i++)
10436 reg_last_death[i] = insn;
10437 }
10438 else if (REG_NOTE_KIND (link) == REG_INC)
10439 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
10440 }
10441
10442 if (GET_CODE (insn) == CALL_INSN)
10443 {
10444 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
10445 if (call_used_regs[i])
10446 {
10447 reg_last_set_value[i] = 0;
10448 reg_last_set_mode[i] = 0;
10449 reg_last_set_nonzero_bits[i] = 0;
10450 reg_last_set_sign_bit_copies[i] = 0;
10451 reg_last_death[i] = 0;
10452 }
10453
10454 last_call_cuid = mem_last_set = INSN_CUID (insn);
10455 }
10456
10457 record_dead_insn = insn;
10458 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
10459 }
10460 \f
10461 /* Utility routine for the following function. Verify that all the registers
10462 mentioned in *LOC are valid when *LOC was part of a value set when
10463 label_tick == TICK. Return 0 if some are not.
10464
10465 If REPLACE is non-zero, replace the invalid reference with
10466 (clobber (const_int 0)) and return 1. This replacement is useful because
10467 we often can get useful information about the form of a value (e.g., if
10468 it was produced by a shift that always produces -1 or 0) even though
10469 we don't know exactly what registers it was produced from. */
10470
10471 static int
10472 get_last_value_validate (loc, insn, tick, replace)
10473 rtx *loc;
10474 rtx insn;
10475 int tick;
10476 int replace;
10477 {
10478 rtx x = *loc;
10479 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
10480 int len = GET_RTX_LENGTH (GET_CODE (x));
10481 int i;
10482
10483 if (GET_CODE (x) == REG)
10484 {
10485 int regno = REGNO (x);
10486 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10487 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10488 int j;
10489
10490 for (j = regno; j < endregno; j++)
10491 if (reg_last_set_invalid[j]
10492 /* If this is a pseudo-register that was only set once, it is
10493 always valid. */
10494 || (! (regno >= FIRST_PSEUDO_REGISTER && REG_N_SETS (regno) == 1)
10495 && reg_last_set_label[j] > tick))
10496 {
10497 if (replace)
10498 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
10499 return replace;
10500 }
10501
10502 return 1;
10503 }
10504 /* If this is a memory reference, make sure that there were
10505 no stores after it that might have clobbered the value. We don't
10506 have alias info, so we assume any store invalidates it. */
10507 else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
10508 && INSN_CUID (insn) <= mem_last_set)
10509 {
10510 if (replace)
10511 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
10512 return replace;
10513 }
10514
10515 for (i = 0; i < len; i++)
10516 if ((fmt[i] == 'e'
10517 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
10518 /* Don't bother with these. They shouldn't occur anyway. */
10519 || fmt[i] == 'E')
10520 return 0;
10521
10522 /* If we haven't found a reason for it to be invalid, it is valid. */
10523 return 1;
10524 }
10525
10526 /* Get the last value assigned to X, if known. Some registers
10527 in the value may be replaced with (clobber (const_int 0)) if their value
10528 is known longer known reliably. */
10529
10530 static rtx
10531 get_last_value (x)
10532 rtx x;
10533 {
10534 int regno;
10535 rtx value;
10536
10537 /* If this is a non-paradoxical SUBREG, get the value of its operand and
10538 then convert it to the desired mode. If this is a paradoxical SUBREG,
10539 we cannot predict what values the "extra" bits might have. */
10540 if (GET_CODE (x) == SUBREG
10541 && subreg_lowpart_p (x)
10542 && (GET_MODE_SIZE (GET_MODE (x))
10543 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
10544 && (value = get_last_value (SUBREG_REG (x))) != 0)
10545 return gen_lowpart_for_combine (GET_MODE (x), value);
10546
10547 if (GET_CODE (x) != REG)
10548 return 0;
10549
10550 regno = REGNO (x);
10551 value = reg_last_set_value[regno];
10552
10553 /* If we don't have a value or if it isn't for this basic block,
10554 return 0. */
10555
10556 if (value == 0
10557 || (REG_N_SETS (regno) != 1
10558 && reg_last_set_label[regno] != label_tick))
10559 return 0;
10560
10561 /* If the value was set in a later insn than the ones we are processing,
10562 we can't use it even if the register was only set once, but make a quick
10563 check to see if the previous insn set it to something. This is commonly
10564 the case when the same pseudo is used by repeated insns.
10565
10566 This does not work if there exists an instruction which is temporarily
10567 not on the insn chain. */
10568
10569 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
10570 {
10571 rtx insn, set;
10572
10573 /* We can not do anything useful in this case, because there is
10574 an instruction which is not on the insn chain. */
10575 if (subst_prev_insn)
10576 return 0;
10577
10578 /* Skip over USE insns. They are not useful here, and they may have
10579 been made by combine, in which case they do not have a INSN_CUID
10580 value. We can't use prev_real_insn, because that would incorrectly
10581 take us backwards across labels. Skip over BARRIERs also, since
10582 they could have been made by combine. If we see one, we must be
10583 optimizing dead code, so it doesn't matter what we do. */
10584 for (insn = prev_nonnote_insn (subst_insn);
10585 insn && ((GET_CODE (insn) == INSN
10586 && GET_CODE (PATTERN (insn)) == USE)
10587 || GET_CODE (insn) == BARRIER
10588 || INSN_CUID (insn) >= subst_low_cuid);
10589 insn = prev_nonnote_insn (insn))
10590 ;
10591
10592 if (insn
10593 && (set = single_set (insn)) != 0
10594 && rtx_equal_p (SET_DEST (set), x))
10595 {
10596 value = SET_SRC (set);
10597
10598 /* Make sure that VALUE doesn't reference X. Replace any
10599 explicit references with a CLOBBER. If there are any remaining
10600 references (rare), don't use the value. */
10601
10602 if (reg_mentioned_p (x, value))
10603 value = replace_rtx (copy_rtx (value), x,
10604 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
10605
10606 if (reg_overlap_mentioned_p (x, value))
10607 return 0;
10608 }
10609 else
10610 return 0;
10611 }
10612
10613 /* If the value has all its registers valid, return it. */
10614 if (get_last_value_validate (&value, reg_last_set[regno],
10615 reg_last_set_label[regno], 0))
10616 return value;
10617
10618 /* Otherwise, make a copy and replace any invalid register with
10619 (clobber (const_int 0)). If that fails for some reason, return 0. */
10620
10621 value = copy_rtx (value);
10622 if (get_last_value_validate (&value, reg_last_set[regno],
10623 reg_last_set_label[regno], 1))
10624 return value;
10625
10626 return 0;
10627 }
10628 \f
10629 /* Return nonzero if expression X refers to a REG or to memory
10630 that is set in an instruction more recent than FROM_CUID. */
10631
10632 static int
10633 use_crosses_set_p (x, from_cuid)
10634 register rtx x;
10635 int from_cuid;
10636 {
10637 register char *fmt;
10638 register int i;
10639 register enum rtx_code code = GET_CODE (x);
10640
10641 if (code == REG)
10642 {
10643 register int regno = REGNO (x);
10644 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
10645 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10646
10647 #ifdef PUSH_ROUNDING
10648 /* Don't allow uses of the stack pointer to be moved,
10649 because we don't know whether the move crosses a push insn. */
10650 if (regno == STACK_POINTER_REGNUM)
10651 return 1;
10652 #endif
10653 for (;regno < endreg; regno++)
10654 if (reg_last_set[regno]
10655 && INSN_CUID (reg_last_set[regno]) > from_cuid)
10656 return 1;
10657 return 0;
10658 }
10659
10660 if (code == MEM && mem_last_set > from_cuid)
10661 return 1;
10662
10663 fmt = GET_RTX_FORMAT (code);
10664
10665 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10666 {
10667 if (fmt[i] == 'E')
10668 {
10669 register int j;
10670 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10671 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
10672 return 1;
10673 }
10674 else if (fmt[i] == 'e'
10675 && use_crosses_set_p (XEXP (x, i), from_cuid))
10676 return 1;
10677 }
10678 return 0;
10679 }
10680 \f
10681 /* Define three variables used for communication between the following
10682 routines. */
10683
10684 static int reg_dead_regno, reg_dead_endregno;
10685 static int reg_dead_flag;
10686
10687 /* Function called via note_stores from reg_dead_at_p.
10688
10689 If DEST is within [reg_dead_regno, reg_dead_endregno), set
10690 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
10691
10692 static void
10693 reg_dead_at_p_1 (dest, x)
10694 rtx dest;
10695 rtx x;
10696 {
10697 int regno, endregno;
10698
10699 if (GET_CODE (dest) != REG)
10700 return;
10701
10702 regno = REGNO (dest);
10703 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10704 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
10705
10706 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
10707 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
10708 }
10709
10710 /* Return non-zero if REG is known to be dead at INSN.
10711
10712 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
10713 referencing REG, it is dead. If we hit a SET referencing REG, it is
10714 live. Otherwise, see if it is live or dead at the start of the basic
10715 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
10716 must be assumed to be always live. */
10717
10718 static int
10719 reg_dead_at_p (reg, insn)
10720 rtx reg;
10721 rtx insn;
10722 {
10723 int block, i;
10724
10725 /* Set variables for reg_dead_at_p_1. */
10726 reg_dead_regno = REGNO (reg);
10727 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
10728 ? HARD_REGNO_NREGS (reg_dead_regno,
10729 GET_MODE (reg))
10730 : 1);
10731
10732 reg_dead_flag = 0;
10733
10734 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
10735 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
10736 {
10737 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10738 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
10739 return 0;
10740 }
10741
10742 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
10743 beginning of function. */
10744 for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
10745 insn = prev_nonnote_insn (insn))
10746 {
10747 note_stores (PATTERN (insn), reg_dead_at_p_1);
10748 if (reg_dead_flag)
10749 return reg_dead_flag == 1 ? 1 : 0;
10750
10751 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
10752 return 1;
10753 }
10754
10755 /* Get the basic block number that we were in. */
10756 if (insn == 0)
10757 block = 0;
10758 else
10759 {
10760 for (block = 0; block < n_basic_blocks; block++)
10761 if (insn == basic_block_head[block])
10762 break;
10763
10764 if (block == n_basic_blocks)
10765 return 0;
10766 }
10767
10768 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10769 if (REGNO_REG_SET_P (basic_block_live_at_start[block], i))
10770 return 0;
10771
10772 return 1;
10773 }
10774 \f
10775 /* Note hard registers in X that are used. This code is similar to
10776 that in flow.c, but much simpler since we don't care about pseudos. */
10777
10778 static void
10779 mark_used_regs_combine (x)
10780 rtx x;
10781 {
10782 register RTX_CODE code = GET_CODE (x);
10783 register int regno;
10784 int i;
10785
10786 switch (code)
10787 {
10788 case LABEL_REF:
10789 case SYMBOL_REF:
10790 case CONST_INT:
10791 case CONST:
10792 case CONST_DOUBLE:
10793 case PC:
10794 case ADDR_VEC:
10795 case ADDR_DIFF_VEC:
10796 case ASM_INPUT:
10797 #ifdef HAVE_cc0
10798 /* CC0 must die in the insn after it is set, so we don't need to take
10799 special note of it here. */
10800 case CC0:
10801 #endif
10802 return;
10803
10804 case CLOBBER:
10805 /* If we are clobbering a MEM, mark any hard registers inside the
10806 address as used. */
10807 if (GET_CODE (XEXP (x, 0)) == MEM)
10808 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
10809 return;
10810
10811 case REG:
10812 regno = REGNO (x);
10813 /* A hard reg in a wide mode may really be multiple registers.
10814 If so, mark all of them just like the first. */
10815 if (regno < FIRST_PSEUDO_REGISTER)
10816 {
10817 /* None of this applies to the stack, frame or arg pointers */
10818 if (regno == STACK_POINTER_REGNUM
10819 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
10820 || regno == HARD_FRAME_POINTER_REGNUM
10821 #endif
10822 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
10823 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
10824 #endif
10825 || regno == FRAME_POINTER_REGNUM)
10826 return;
10827
10828 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
10829 while (i-- > 0)
10830 SET_HARD_REG_BIT (newpat_used_regs, regno + i);
10831 }
10832 return;
10833
10834 case SET:
10835 {
10836 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
10837 the address. */
10838 register rtx testreg = SET_DEST (x);
10839
10840 while (GET_CODE (testreg) == SUBREG
10841 || GET_CODE (testreg) == ZERO_EXTRACT
10842 || GET_CODE (testreg) == SIGN_EXTRACT
10843 || GET_CODE (testreg) == STRICT_LOW_PART)
10844 testreg = XEXP (testreg, 0);
10845
10846 if (GET_CODE (testreg) == MEM)
10847 mark_used_regs_combine (XEXP (testreg, 0));
10848
10849 mark_used_regs_combine (SET_SRC (x));
10850 }
10851 return;
10852
10853 default:
10854 break;
10855 }
10856
10857 /* Recursively scan the operands of this expression. */
10858
10859 {
10860 register char *fmt = GET_RTX_FORMAT (code);
10861
10862 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10863 {
10864 if (fmt[i] == 'e')
10865 mark_used_regs_combine (XEXP (x, i));
10866 else if (fmt[i] == 'E')
10867 {
10868 register int j;
10869
10870 for (j = 0; j < XVECLEN (x, i); j++)
10871 mark_used_regs_combine (XVECEXP (x, i, j));
10872 }
10873 }
10874 }
10875 }
10876
10877 \f
10878 /* Remove register number REGNO from the dead registers list of INSN.
10879
10880 Return the note used to record the death, if there was one. */
10881
10882 rtx
10883 remove_death (regno, insn)
10884 int regno;
10885 rtx insn;
10886 {
10887 register rtx note = find_regno_note (insn, REG_DEAD, regno);
10888
10889 if (note)
10890 {
10891 REG_N_DEATHS (regno)--;
10892 remove_note (insn, note);
10893 }
10894
10895 return note;
10896 }
10897
10898 /* For each register (hardware or pseudo) used within expression X, if its
10899 death is in an instruction with cuid between FROM_CUID (inclusive) and
10900 TO_INSN (exclusive), put a REG_DEAD note for that register in the
10901 list headed by PNOTES.
10902
10903 That said, don't move registers killed by maybe_kill_insn.
10904
10905 This is done when X is being merged by combination into TO_INSN. These
10906 notes will then be distributed as needed. */
10907
10908 static void
10909 move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
10910 rtx x;
10911 rtx maybe_kill_insn;
10912 int from_cuid;
10913 rtx to_insn;
10914 rtx *pnotes;
10915 {
10916 register char *fmt;
10917 register int len, i;
10918 register enum rtx_code code = GET_CODE (x);
10919
10920 if (code == REG)
10921 {
10922 register int regno = REGNO (x);
10923 register rtx where_dead = reg_last_death[regno];
10924 register rtx before_dead, after_dead;
10925
10926 /* Don't move the register if it gets killed in between from and to */
10927 if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
10928 && !reg_referenced_p (x, maybe_kill_insn))
10929 return;
10930
10931 /* WHERE_DEAD could be a USE insn made by combine, so first we
10932 make sure that we have insns with valid INSN_CUID values. */
10933 before_dead = where_dead;
10934 while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
10935 before_dead = PREV_INSN (before_dead);
10936 after_dead = where_dead;
10937 while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
10938 after_dead = NEXT_INSN (after_dead);
10939
10940 if (before_dead && after_dead
10941 && INSN_CUID (before_dead) >= from_cuid
10942 && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
10943 || (where_dead != after_dead
10944 && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
10945 {
10946 rtx note = remove_death (regno, where_dead);
10947
10948 /* It is possible for the call above to return 0. This can occur
10949 when reg_last_death points to I2 or I1 that we combined with.
10950 In that case make a new note.
10951
10952 We must also check for the case where X is a hard register
10953 and NOTE is a death note for a range of hard registers
10954 including X. In that case, we must put REG_DEAD notes for
10955 the remaining registers in place of NOTE. */
10956
10957 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
10958 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
10959 > GET_MODE_SIZE (GET_MODE (x))))
10960 {
10961 int deadregno = REGNO (XEXP (note, 0));
10962 int deadend
10963 = (deadregno + HARD_REGNO_NREGS (deadregno,
10964 GET_MODE (XEXP (note, 0))));
10965 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10966 int i;
10967
10968 for (i = deadregno; i < deadend; i++)
10969 if (i < regno || i >= ourend)
10970 REG_NOTES (where_dead)
10971 = gen_rtx (EXPR_LIST, REG_DEAD,
10972 gen_rtx (REG, reg_raw_mode[i], i),
10973 REG_NOTES (where_dead));
10974 }
10975 /* If we didn't find any note, or if we found a REG_DEAD note that
10976 covers only part of the given reg, and we have a multi-reg hard
10977 register, then to be safe we must check for REG_DEAD notes
10978 for each register other than the first. They could have
10979 their own REG_DEAD notes lying around. */
10980 else if ((note == 0
10981 || (note != 0
10982 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
10983 < GET_MODE_SIZE (GET_MODE (x)))))
10984 && regno < FIRST_PSEUDO_REGISTER
10985 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
10986 {
10987 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10988 int i, offset;
10989 rtx oldnotes = 0;
10990
10991 if (note)
10992 offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
10993 else
10994 offset = 1;
10995
10996 for (i = regno + offset; i < ourend; i++)
10997 move_deaths (gen_rtx (REG, reg_raw_mode[i], i),
10998 maybe_kill_insn, from_cuid, to_insn, &oldnotes);
10999 }
11000
11001 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11002 {
11003 XEXP (note, 1) = *pnotes;
11004 *pnotes = note;
11005 }
11006 else
11007 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
11008
11009 REG_N_DEATHS (regno)++;
11010 }
11011
11012 return;
11013 }
11014
11015 else if (GET_CODE (x) == SET)
11016 {
11017 rtx dest = SET_DEST (x);
11018
11019 move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
11020
11021 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
11022 that accesses one word of a multi-word item, some
11023 piece of everything register in the expression is used by
11024 this insn, so remove any old death. */
11025
11026 if (GET_CODE (dest) == ZERO_EXTRACT
11027 || GET_CODE (dest) == STRICT_LOW_PART
11028 || (GET_CODE (dest) == SUBREG
11029 && (((GET_MODE_SIZE (GET_MODE (dest))
11030 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
11031 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
11032 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
11033 {
11034 move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
11035 return;
11036 }
11037
11038 /* If this is some other SUBREG, we know it replaces the entire
11039 value, so use that as the destination. */
11040 if (GET_CODE (dest) == SUBREG)
11041 dest = SUBREG_REG (dest);
11042
11043 /* If this is a MEM, adjust deaths of anything used in the address.
11044 For a REG (the only other possibility), the entire value is
11045 being replaced so the old value is not used in this insn. */
11046
11047 if (GET_CODE (dest) == MEM)
11048 move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
11049 to_insn, pnotes);
11050 return;
11051 }
11052
11053 else if (GET_CODE (x) == CLOBBER)
11054 return;
11055
11056 len = GET_RTX_LENGTH (code);
11057 fmt = GET_RTX_FORMAT (code);
11058
11059 for (i = 0; i < len; i++)
11060 {
11061 if (fmt[i] == 'E')
11062 {
11063 register int j;
11064 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11065 move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
11066 to_insn, pnotes);
11067 }
11068 else if (fmt[i] == 'e')
11069 move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
11070 }
11071 }
11072 \f
11073 /* Return 1 if X is the target of a bit-field assignment in BODY, the
11074 pattern of an insn. X must be a REG. */
11075
11076 static int
11077 reg_bitfield_target_p (x, body)
11078 rtx x;
11079 rtx body;
11080 {
11081 int i;
11082
11083 if (GET_CODE (body) == SET)
11084 {
11085 rtx dest = SET_DEST (body);
11086 rtx target;
11087 int regno, tregno, endregno, endtregno;
11088
11089 if (GET_CODE (dest) == ZERO_EXTRACT)
11090 target = XEXP (dest, 0);
11091 else if (GET_CODE (dest) == STRICT_LOW_PART)
11092 target = SUBREG_REG (XEXP (dest, 0));
11093 else
11094 return 0;
11095
11096 if (GET_CODE (target) == SUBREG)
11097 target = SUBREG_REG (target);
11098
11099 if (GET_CODE (target) != REG)
11100 return 0;
11101
11102 tregno = REGNO (target), regno = REGNO (x);
11103 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11104 return target == x;
11105
11106 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
11107 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11108
11109 return endregno > tregno && regno < endtregno;
11110 }
11111
11112 else if (GET_CODE (body) == PARALLEL)
11113 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
11114 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
11115 return 1;
11116
11117 return 0;
11118 }
11119 \f
11120 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
11121 as appropriate. I3 and I2 are the insns resulting from the combination
11122 insns including FROM (I2 may be zero).
11123
11124 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
11125 not need REG_DEAD notes because they are being substituted for. This
11126 saves searching in the most common cases.
11127
11128 Each note in the list is either ignored or placed on some insns, depending
11129 on the type of note. */
11130
11131 static void
11132 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
11133 rtx notes;
11134 rtx from_insn;
11135 rtx i3, i2;
11136 rtx elim_i2, elim_i1;
11137 {
11138 rtx note, next_note;
11139 rtx tem;
11140
11141 for (note = notes; note; note = next_note)
11142 {
11143 rtx place = 0, place2 = 0;
11144
11145 /* If this NOTE references a pseudo register, ensure it references
11146 the latest copy of that register. */
11147 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
11148 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
11149 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
11150
11151 next_note = XEXP (note, 1);
11152 switch (REG_NOTE_KIND (note))
11153 {
11154 case REG_BR_PROB:
11155 case REG_EXEC_COUNT:
11156 /* Doesn't matter much where we put this, as long as it's somewhere.
11157 It is preferable to keep these notes on branches, which is most
11158 likely to be i3. */
11159 place = i3;
11160 break;
11161
11162 case REG_UNUSED:
11163 /* Any clobbers for i3 may still exist, and so we must process
11164 REG_UNUSED notes from that insn.
11165
11166 Any clobbers from i2 or i1 can only exist if they were added by
11167 recog_for_combine. In that case, recog_for_combine created the
11168 necessary REG_UNUSED notes. Trying to keep any original
11169 REG_UNUSED notes from these insns can cause incorrect output
11170 if it is for the same register as the original i3 dest.
11171 In that case, we will notice that the register is set in i3,
11172 and then add a REG_UNUSED note for the destination of i3, which
11173 is wrong. However, it is possible to have REG_UNUSED notes from
11174 i2 or i1 for register which were both used and clobbered, so
11175 we keep notes from i2 or i1 if they will turn into REG_DEAD
11176 notes. */
11177
11178 /* If this register is set or clobbered in I3, put the note there
11179 unless there is one already. */
11180 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
11181 {
11182 if (from_insn != i3)
11183 break;
11184
11185 if (! (GET_CODE (XEXP (note, 0)) == REG
11186 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
11187 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
11188 place = i3;
11189 }
11190 /* Otherwise, if this register is used by I3, then this register
11191 now dies here, so we must put a REG_DEAD note here unless there
11192 is one already. */
11193 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
11194 && ! (GET_CODE (XEXP (note, 0)) == REG
11195 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
11196 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
11197 {
11198 PUT_REG_NOTE_KIND (note, REG_DEAD);
11199 place = i3;
11200 }
11201 break;
11202
11203 case REG_EQUAL:
11204 case REG_EQUIV:
11205 case REG_NONNEG:
11206 case REG_NOALIAS:
11207 /* These notes say something about results of an insn. We can
11208 only support them if they used to be on I3 in which case they
11209 remain on I3. Otherwise they are ignored.
11210
11211 If the note refers to an expression that is not a constant, we
11212 must also ignore the note since we cannot tell whether the
11213 equivalence is still true. It might be possible to do
11214 slightly better than this (we only have a problem if I2DEST
11215 or I1DEST is present in the expression), but it doesn't
11216 seem worth the trouble. */
11217
11218 if (from_insn == i3
11219 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
11220 place = i3;
11221 break;
11222
11223 case REG_INC:
11224 case REG_NO_CONFLICT:
11225 case REG_LABEL:
11226 /* These notes say something about how a register is used. They must
11227 be present on any use of the register in I2 or I3. */
11228 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
11229 place = i3;
11230
11231 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
11232 {
11233 if (place)
11234 place2 = i2;
11235 else
11236 place = i2;
11237 }
11238 break;
11239
11240 case REG_WAS_0:
11241 /* It is too much trouble to try to see if this note is still
11242 correct in all situations. It is better to simply delete it. */
11243 break;
11244
11245 case REG_RETVAL:
11246 /* If the insn previously containing this note still exists,
11247 put it back where it was. Otherwise move it to the previous
11248 insn. Adjust the corresponding REG_LIBCALL note. */
11249 if (GET_CODE (from_insn) != NOTE)
11250 place = from_insn;
11251 else
11252 {
11253 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
11254 place = prev_real_insn (from_insn);
11255 if (tem && place)
11256 XEXP (tem, 0) = place;
11257 }
11258 break;
11259
11260 case REG_LIBCALL:
11261 /* This is handled similarly to REG_RETVAL. */
11262 if (GET_CODE (from_insn) != NOTE)
11263 place = from_insn;
11264 else
11265 {
11266 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
11267 place = next_real_insn (from_insn);
11268 if (tem && place)
11269 XEXP (tem, 0) = place;
11270 }
11271 break;
11272
11273 case REG_DEAD:
11274 /* If the register is used as an input in I3, it dies there.
11275 Similarly for I2, if it is non-zero and adjacent to I3.
11276
11277 If the register is not used as an input in either I3 or I2
11278 and it is not one of the registers we were supposed to eliminate,
11279 there are two possibilities. We might have a non-adjacent I2
11280 or we might have somehow eliminated an additional register
11281 from a computation. For example, we might have had A & B where
11282 we discover that B will always be zero. In this case we will
11283 eliminate the reference to A.
11284
11285 In both cases, we must search to see if we can find a previous
11286 use of A and put the death note there. */
11287
11288 if (from_insn
11289 && GET_CODE (from_insn) == CALL_INSN
11290 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
11291 place = from_insn;
11292 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
11293 place = i3;
11294 else if (i2 != 0 && next_nonnote_insn (i2) == i3
11295 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11296 place = i2;
11297
11298 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
11299 break;
11300
11301 /* If the register is used in both I2 and I3 and it dies in I3,
11302 we might have added another reference to it. If reg_n_refs
11303 was 2, bump it to 3. This has to be correct since the
11304 register must have been set somewhere. The reason this is
11305 done is because local-alloc.c treats 2 references as a
11306 special case. */
11307
11308 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
11309 && REG_N_REFS (REGNO (XEXP (note, 0)))== 2
11310 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11311 REG_N_REFS (REGNO (XEXP (note, 0))) = 3;
11312
11313 if (place == 0)
11314 {
11315 for (tem = prev_nonnote_insn (i3);
11316 place == 0 && tem
11317 && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
11318 tem = prev_nonnote_insn (tem))
11319 {
11320 /* If the register is being set at TEM, see if that is all
11321 TEM is doing. If so, delete TEM. Otherwise, make this
11322 into a REG_UNUSED note instead. */
11323 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
11324 {
11325 rtx set = single_set (tem);
11326
11327 /* Verify that it was the set, and not a clobber that
11328 modified the register. */
11329
11330 if (set != 0 && ! side_effects_p (SET_SRC (set))
11331 && (rtx_equal_p (XEXP (note, 0), SET_DEST (set))
11332 || (GET_CODE (SET_DEST (set)) == SUBREG
11333 && rtx_equal_p (XEXP (note, 0),
11334 XEXP (SET_DEST (set), 0)))))
11335 {
11336 /* Move the notes and links of TEM elsewhere.
11337 This might delete other dead insns recursively.
11338 First set the pattern to something that won't use
11339 any register. */
11340
11341 PATTERN (tem) = pc_rtx;
11342
11343 distribute_notes (REG_NOTES (tem), tem, tem,
11344 NULL_RTX, NULL_RTX, NULL_RTX);
11345 distribute_links (LOG_LINKS (tem));
11346
11347 PUT_CODE (tem, NOTE);
11348 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
11349 NOTE_SOURCE_FILE (tem) = 0;
11350 }
11351 else
11352 {
11353 PUT_REG_NOTE_KIND (note, REG_UNUSED);
11354
11355 /* If there isn't already a REG_UNUSED note, put one
11356 here. */
11357 if (! find_regno_note (tem, REG_UNUSED,
11358 REGNO (XEXP (note, 0))))
11359 place = tem;
11360 break;
11361 }
11362 }
11363 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
11364 || (GET_CODE (tem) == CALL_INSN
11365 && find_reg_fusage (tem, USE, XEXP (note, 0))))
11366 {
11367 place = tem;
11368
11369 /* If we are doing a 3->2 combination, and we have a
11370 register which formerly died in i3 and was not used
11371 by i2, which now no longer dies in i3 and is used in
11372 i2 but does not die in i2, and place is between i2
11373 and i3, then we may need to move a link from place to
11374 i2. */
11375 if (i2 && INSN_UID (place) <= max_uid_cuid
11376 && INSN_CUID (place) > INSN_CUID (i2)
11377 && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
11378 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11379 {
11380 rtx links = LOG_LINKS (place);
11381 LOG_LINKS (place) = 0;
11382 distribute_links (links);
11383 }
11384 break;
11385 }
11386 }
11387
11388 /* If we haven't found an insn for the death note and it
11389 is still a REG_DEAD note, but we have hit a CODE_LABEL,
11390 insert a USE insn for the register at that label and
11391 put the death node there. This prevents problems with
11392 call-state tracking in caller-save.c. */
11393 if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
11394 {
11395 place
11396 = emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (note, 0)),
11397 tem);
11398
11399 /* If this insn was emitted between blocks, then update
11400 basic_block_head of the current block to include it. */
11401 if (basic_block_end[this_basic_block - 1] == tem)
11402 basic_block_head[this_basic_block] = place;
11403 }
11404 }
11405
11406 /* If the register is set or already dead at PLACE, we needn't do
11407 anything with this note if it is still a REG_DEAD note.
11408
11409 Note that we cannot use just `dead_or_set_p' here since we can
11410 convert an assignment to a register into a bit-field assignment.
11411 Therefore, we must also omit the note if the register is the
11412 target of a bitfield assignment. */
11413
11414 if (place && REG_NOTE_KIND (note) == REG_DEAD)
11415 {
11416 int regno = REGNO (XEXP (note, 0));
11417
11418 if (dead_or_set_p (place, XEXP (note, 0))
11419 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
11420 {
11421 /* Unless the register previously died in PLACE, clear
11422 reg_last_death. [I no longer understand why this is
11423 being done.] */
11424 if (reg_last_death[regno] != place)
11425 reg_last_death[regno] = 0;
11426 place = 0;
11427 }
11428 else
11429 reg_last_death[regno] = place;
11430
11431 /* If this is a death note for a hard reg that is occupying
11432 multiple registers, ensure that we are still using all
11433 parts of the object. If we find a piece of the object
11434 that is unused, we must add a USE for that piece before
11435 PLACE and put the appropriate REG_DEAD note on it.
11436
11437 An alternative would be to put a REG_UNUSED for the pieces
11438 on the insn that set the register, but that can't be done if
11439 it is not in the same block. It is simpler, though less
11440 efficient, to add the USE insns. */
11441
11442 if (place && regno < FIRST_PSEUDO_REGISTER
11443 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
11444 {
11445 int endregno
11446 = regno + HARD_REGNO_NREGS (regno,
11447 GET_MODE (XEXP (note, 0)));
11448 int all_used = 1;
11449 int i;
11450
11451 for (i = regno; i < endregno; i++)
11452 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
11453 && ! find_regno_fusage (place, USE, i))
11454 {
11455 rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
11456 rtx p;
11457
11458 /* See if we already placed a USE note for this
11459 register in front of PLACE. */
11460 for (p = place;
11461 GET_CODE (PREV_INSN (p)) == INSN
11462 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
11463 p = PREV_INSN (p))
11464 if (rtx_equal_p (piece,
11465 XEXP (PATTERN (PREV_INSN (p)), 0)))
11466 {
11467 p = 0;
11468 break;
11469 }
11470
11471 if (p)
11472 {
11473 rtx use_insn
11474 = emit_insn_before (gen_rtx (USE, VOIDmode,
11475 piece),
11476 p);
11477 REG_NOTES (use_insn)
11478 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
11479 REG_NOTES (use_insn));
11480 }
11481
11482 all_used = 0;
11483 }
11484
11485 /* Check for the case where the register dying partially
11486 overlaps the register set by this insn. */
11487 if (all_used)
11488 for (i = regno; i < endregno; i++)
11489 if (dead_or_set_regno_p (place, i))
11490 {
11491 all_used = 0;
11492 break;
11493 }
11494
11495 if (! all_used)
11496 {
11497 /* Put only REG_DEAD notes for pieces that are
11498 still used and that are not already dead or set. */
11499
11500 for (i = regno; i < endregno; i++)
11501 {
11502 rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
11503
11504 if ((reg_referenced_p (piece, PATTERN (place))
11505 || (GET_CODE (place) == CALL_INSN
11506 && find_reg_fusage (place, USE, piece)))
11507 && ! dead_or_set_p (place, piece)
11508 && ! reg_bitfield_target_p (piece,
11509 PATTERN (place)))
11510 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
11511 piece,
11512 REG_NOTES (place));
11513 }
11514
11515 place = 0;
11516 }
11517 }
11518 }
11519 break;
11520
11521 default:
11522 /* Any other notes should not be present at this point in the
11523 compilation. */
11524 abort ();
11525 }
11526
11527 if (place)
11528 {
11529 XEXP (note, 1) = REG_NOTES (place);
11530 REG_NOTES (place) = note;
11531 }
11532 else if ((REG_NOTE_KIND (note) == REG_DEAD
11533 || REG_NOTE_KIND (note) == REG_UNUSED)
11534 && GET_CODE (XEXP (note, 0)) == REG)
11535 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
11536
11537 if (place2)
11538 {
11539 if ((REG_NOTE_KIND (note) == REG_DEAD
11540 || REG_NOTE_KIND (note) == REG_UNUSED)
11541 && GET_CODE (XEXP (note, 0)) == REG)
11542 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
11543
11544 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
11545 XEXP (note, 0), REG_NOTES (place2));
11546 }
11547 }
11548 }
11549 \f
11550 /* Similarly to above, distribute the LOG_LINKS that used to be present on
11551 I3, I2, and I1 to new locations. This is also called in one case to
11552 add a link pointing at I3 when I3's destination is changed. */
11553
11554 static void
11555 distribute_links (links)
11556 rtx links;
11557 {
11558 rtx link, next_link;
11559
11560 for (link = links; link; link = next_link)
11561 {
11562 rtx place = 0;
11563 rtx insn;
11564 rtx set, reg;
11565
11566 next_link = XEXP (link, 1);
11567
11568 /* If the insn that this link points to is a NOTE or isn't a single
11569 set, ignore it. In the latter case, it isn't clear what we
11570 can do other than ignore the link, since we can't tell which
11571 register it was for. Such links wouldn't be used by combine
11572 anyway.
11573
11574 It is not possible for the destination of the target of the link to
11575 have been changed by combine. The only potential of this is if we
11576 replace I3, I2, and I1 by I3 and I2. But in that case the
11577 destination of I2 also remains unchanged. */
11578
11579 if (GET_CODE (XEXP (link, 0)) == NOTE
11580 || (set = single_set (XEXP (link, 0))) == 0)
11581 continue;
11582
11583 reg = SET_DEST (set);
11584 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
11585 || GET_CODE (reg) == SIGN_EXTRACT
11586 || GET_CODE (reg) == STRICT_LOW_PART)
11587 reg = XEXP (reg, 0);
11588
11589 /* A LOG_LINK is defined as being placed on the first insn that uses
11590 a register and points to the insn that sets the register. Start
11591 searching at the next insn after the target of the link and stop
11592 when we reach a set of the register or the end of the basic block.
11593
11594 Note that this correctly handles the link that used to point from
11595 I3 to I2. Also note that not much searching is typically done here
11596 since most links don't point very far away. */
11597
11598 for (insn = NEXT_INSN (XEXP (link, 0));
11599 (insn && (this_basic_block == n_basic_blocks - 1
11600 || basic_block_head[this_basic_block + 1] != insn));
11601 insn = NEXT_INSN (insn))
11602 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
11603 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
11604 {
11605 if (reg_referenced_p (reg, PATTERN (insn)))
11606 place = insn;
11607 break;
11608 }
11609 else if (GET_CODE (insn) == CALL_INSN
11610 && find_reg_fusage (insn, USE, reg))
11611 {
11612 place = insn;
11613 break;
11614 }
11615
11616 /* If we found a place to put the link, place it there unless there
11617 is already a link to the same insn as LINK at that point. */
11618
11619 if (place)
11620 {
11621 rtx link2;
11622
11623 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
11624 if (XEXP (link2, 0) == XEXP (link, 0))
11625 break;
11626
11627 if (link2 == 0)
11628 {
11629 XEXP (link, 1) = LOG_LINKS (place);
11630 LOG_LINKS (place) = link;
11631
11632 /* Set added_links_insn to the earliest insn we added a
11633 link to. */
11634 if (added_links_insn == 0
11635 || INSN_CUID (added_links_insn) > INSN_CUID (place))
11636 added_links_insn = place;
11637 }
11638 }
11639 }
11640 }
11641 \f
11642 /* Compute INSN_CUID for INSN, which is an insn made by combine. */
11643
11644 static int
11645 insn_cuid (insn)
11646 rtx insn;
11647 {
11648 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
11649 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
11650 insn = NEXT_INSN (insn);
11651
11652 if (INSN_UID (insn) > max_uid_cuid)
11653 abort ();
11654
11655 return INSN_CUID (insn);
11656 }
11657 \f
11658 void
11659 dump_combine_stats (file)
11660 FILE *file;
11661 {
11662 fprintf
11663 (file,
11664 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
11665 combine_attempts, combine_merges, combine_extras, combine_successes);
11666 }
11667
11668 void
11669 dump_combine_total_stats (file)
11670 FILE *file;
11671 {
11672 fprintf
11673 (file,
11674 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
11675 total_attempts, total_merges, total_extras, total_successes);
11676 }
This page took 0.636291 seconds and 4 git commands to generate.