/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not record links for uses of CC0.  It doesn't
   need to, because the insn that sets CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

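/* As an illustrative sketch of such a substitution (the register numbers
   here are arbitrary, not from any real compilation): given the linked pair

	(set (reg 100) (plus (reg 99) (const_int 4)))
	(set (reg 101) (mult (reg 100) (reg 102)))

   where (reg 100) is used only in the second insn, substituting the first
   insn's source into the second yields

	(set (reg 101) (mult (plus (reg 99) (const_int 4)) (reg 102)))

   which replaces both insns if the machine description recognizes the
   combined pattern; otherwise the combination is rejected and nothing
   changes.  */
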
#include "config.h"
#include "gvarargs.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include <stdio.h>

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* If byte loads either zero- or sign- extend, define BYTE_LOADS_EXTEND
   for cases when we don't care which is true.  Define LOAD_EXTEND to
   be ZERO_EXTEND or SIGN_EXTEND, depending on which was defined.  */

#ifdef BYTE_LOADS_ZERO_EXTEND
#define BYTE_LOADS_EXTEND
#define LOAD_EXTEND ZERO_EXTEND
#endif

#ifdef BYTE_LOADS_SIGN_EXTEND
#define BYTE_LOADS_EXTEND
#define LOAD_EXTEND SIGN_EXTEND
#endif

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
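
/* Since cuids increase monotonically, INSN_CUID (A) < INSN_CUID (B)
   tests whether insn A comes before insn B in the current function;
   combine relies on this ordering test throughout (for example,
   try_combine uses it to make I1 the earlier of I1 and I2).  */
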
/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

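/* A concrete sketch of the redundancy test described above (register
   numbers are illustrative): if pseudo 100 was last set by a QImode
   zero-extending load, reg_nonzero_bits[100] records that only the low
   8 bits can be nonzero, so a later (and (reg 100) (const_int 255))
   changes no bits and can be simplified away.  */
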
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static short *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static short *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static short label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx rtx; int i;} old_contents;
  union {rtx *rtx; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);					\
      if (undobuf.num_undo < MAX_UNDO)				\
	{							\
	  undobuf.undo[undobuf.num_undo].is_int = 0;		\
	  undobuf.undo[undobuf.num_undo].where.rtx = &INTO;	\
	  undobuf.undo[undobuf.num_undo].old_contents.rtx = INTO; \
	  INTO = _new;						\
	  if (undobuf.undo[undobuf.num_undo].old_contents.rtx != INTO) \
	    undobuf.num_undo++;					\
	}							\
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)				\
	{							\
	  undobuf.undo[undobuf.num_undo].is_int = 1;		\
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO;	\
	  INTO = NEWVAL;					\
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
	    undobuf.num_undo++;					\
	}							\
    } while (0)

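/* A typical use of these macros (a sketch, not a quotation from this file):

	SUBST (SET_SRC (x), new_src);

   records the old SET_SRC in undobuf and installs NEW_SRC in its place;
   if the combination is later rejected, undo_all restores every recorded
   old_contents through its saved where pointer.  */
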
/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void set_nonzero_bits_and_sign_copies ();
static void setup_incoming_promotions ();
static void move_deaths ();
rtx remove_death ();
static void record_value_for_reg ();
static void record_dead_and_set_regs ();
static int use_crosses_set_p ();
static rtx try_combine ();
static rtx *find_split_point ();
static rtx subst ();
static void undo_all ();
static int reg_dead_at_p ();
static rtx expand_compound_operation ();
static rtx expand_field_assignment ();
static rtx make_extraction ();
static int get_pos_from_mask ();
static rtx force_to_mode ();
static rtx known_cond ();
static rtx make_field_assignment ();
static rtx make_compound_operation ();
static rtx apply_distributive_law ();
static rtx simplify_and_const_int ();
static unsigned HOST_WIDE_INT nonzero_bits ();
static int num_sign_bit_copies ();
static int merge_outer_ops ();
static rtx simplify_shift_const ();
static int recog_for_combine ();
static rtx gen_lowpart_for_combine ();
static rtx gen_rtx_combine ();
static rtx gen_binary ();
static rtx gen_unary ();
static enum rtx_code simplify_comparison ();
static int reversible_comparison_p ();
static int get_last_value_validate ();
static rtx get_last_value ();
static void distribute_notes ();
static void distribute_links ();
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (short *) alloca (nregs * sizeof (short));
  reg_last_set_label = (short *) alloca (nregs * sizeof (short));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_nonzero_bits = (HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (short));
  bzero (reg_last_set_label, nregs * sizeof (short));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero (reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (short));
  bzero (reg_last_set_label, nregs * sizeof (short));
  bzero (reg_last_set_invalid, nregs * sizeof (char));

  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_CODE (insn) == INSN
	       || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
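
/* For instance, on a machine that promotes QImode arguments to SImode
   (an assumption for illustration), a signed char argument arriving in
   (reg:SI 5) is recorded as having the value

	(sign_extend:SI (clobber:QI (const_int 0)))

   i.e. some unknown QImode value sign-extended to SImode.  The form alone
   is enough for nonzero_bits and num_sign_bit_copies to deduce the upper
   bits of the register.  */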
\f
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are known zero.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	return;

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (SET_SRC (set), nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
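
/* An illustrative example of what this records (arbitrary register
   numbers): if pseudo 100 is set in two basic blocks, both times by a
   byte load on a machine defining BYTE_LOADS_ZERO_EXTEND, each SET
   contributes nonzero bits 0xff, so reg_nonzero_bits[100] ends up 0xff;
   a set whose source might have any bits on instead forces the
   conservative GET_MODE_MASK value, as in the else branch above.  */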
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't install a subreg involving two modes not tieable.
	 It can worsen register allocation, and can even make invalid reload
	 insns, since the reg inside may need to be copied from in the
	 outside mode, and that may be invalid if it is an fp reg copied in
	 integer mode.  As a special exception, we can allow this if
	 I3 is simply copying DEST, a REG, to CC0.  */
      || (GET_CODE (src) == SUBREG
	  && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
#ifdef HAVE_cc0
	  && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
		&& SET_DEST (PATTERN (i3)) == cc0_rtx
		&& GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
#endif
	  )
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  Also, don't move a volatile asm across any other insns.  */
      || (! all_adjacent
	  && (use_crosses_set_p (src, INSN_CUID (insn))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
#ifdef SMALL_REGISTER_CLASSES
	      /* Don't extend the life of a hard register.  */
	      || REGNO (src) < FIRST_PSEUDO_REGISTER
#else
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
#endif
	      ))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
	      && GET_CODE (src) != CALL
#else
	      && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
				       GET_MODE (inner_dest))
#endif
	      )

	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3)))
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   If we created two insns, return I2; otherwise return I3.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

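  /* A sketch of the special case just described (register numbers and the
     operation are illustrative only):

	I2: (parallel [(set (reg 100) (div:SI ...))
		       (set (reg 101) (mod:SI ...))])
	I3: (set (mem:SI (reg 200)) (reg 101))

     with (reg 101) dead after I3.  The PARALLEL, rewritten so its second
     SET stores the remainder directly into (mem:SI (reg 200)), becomes the
     new pattern for I3, and the register-to-memory copy disappears.  */
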
1131 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1132 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1133 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1134 #ifdef SMALL_REGISTER_CLASSES
1135 && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1136 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
1137 #endif
1138 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1139 && GET_CODE (PATTERN (i2)) == PARALLEL
1140 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1141 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1142 below would need to check what is inside (and reg_overlap_mentioned_p
1143 doesn't support those codes anyway). Don't allow those destinations;
1144 the resulting insn isn't likely to be recognized anyway. */
1145 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1146 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1147 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1148 SET_DEST (PATTERN (i3)))
1149 && next_real_insn (i2) == i3)
1150 {
1151 rtx p2 = PATTERN (i2);
1152
1153 /* Make sure that the destination of I3,
1154 which we are going to substitute into one output of I2,
1155 is not used within another output of I2. We must avoid making this:
1156 (parallel [(set (mem (reg 69)) ...)
1157 (set (reg 69) ...)])
1158 which is not well-defined as to order of actions.
1159 (Besides, reload can't handle output reloads for this.)
1160
1161 The problem can also happen if the dest of I3 is a memory ref,
1162 if another dest in I2 is an indirect memory ref. */
1163 for (i = 0; i < XVECLEN (p2, 0); i++)
1164 if (GET_CODE (XVECEXP (p2, 0, i)) == SET
1165 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1166 SET_DEST (XVECEXP (p2, 0, i))))
1167 break;
1168
1169 if (i == XVECLEN (p2, 0))
1170 for (i = 0; i < XVECLEN (p2, 0); i++)
1171 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1172 {
1173 combine_merges++;
1174
1175 subst_insn = i3;
1176 subst_low_cuid = INSN_CUID (i2);
1177
1178 added_sets_2 = 0;
1179 i2dest = SET_SRC (PATTERN (i3));
1180
1181 /* Replace the dest in I2 with our dest and make the resulting
1182 insn the new pattern for I3. Then skip to where we
1183 validate the pattern. Everything was set up above. */
1184 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1185 SET_DEST (PATTERN (i3)));
1186
1187 newpat = p2;
1188 goto validate_replacement;
1189 }
1190 }
1191
1192 #ifndef HAVE_cc0
1193 /* If we have no I1 and I2 looks like:
1194 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1195 (set Y OP)])
1196 make up a dummy I1 that is
1197 (set Y OP)
1198 and change I2 to be
1199 (set (reg:CC X) (compare:CC Y (const_int 0)))
1200
1201 (We can ignore any trailing CLOBBERs.)
1202
1203 This undoes a previous combination and allows us to match a branch-and-
1204 decrement insn. */
1205
1206 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1207 && XVECLEN (PATTERN (i2), 0) >= 2
1208 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1209 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1210 == MODE_CC)
1211 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1212 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1213 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1214 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1215 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1216 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1217 {
1218 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1219 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1220 break;
1221
1222 if (i == 1)
1223 {
1224 /* We make I1 with the same INSN_UID as I2. This gives it
1225 the same INSN_CUID for value tracking. Our fake I1 will
1226 never appear in the insn stream so giving it the same INSN_UID
1227 as I2 will not cause a problem. */
1228
1229 i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1230 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
1231
1232 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1233 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1234 SET_DEST (PATTERN (i1)));
1235 }
1236 }
1237 #endif
1238
1239 /* Verify that I2 and I1 are valid for combining. */
1240 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1241 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1242 {
1243 undo_all ();
1244 return 0;
1245 }
1246
1247 /* Record whether I2DEST is used in I2SRC and similarly for the other
1248 cases. Knowing this will help in register status updating below. */
1249 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1250 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1251 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1252
1253 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1254 in I2SRC. */
1255 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1256
1257 /* Ensure that I3's pattern can be the destination of combines. */
1258 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1259 i1 && i2dest_in_i1src && i1_feeds_i3,
1260 &i3dest_killed))
1261 {
1262 undo_all ();
1263 return 0;
1264 }
1265
1266 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1267 We used to do this EXCEPT in one case: I3 has a post-inc in an
1268 output operand. However, that exception can give rise to insns like
1269 mov r3,(r3)+
1270 which is a famous insn on the PDP-11 where the value of r3 used as the
1271 source was model-dependent. Avoid this sort of thing. */
1272
1273 #if 0
1274 if (!(GET_CODE (PATTERN (i3)) == SET
1275 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1276 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1277 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1278 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1279 /* It's not the exception. */
1280 #endif
1281 #ifdef AUTO_INC_DEC
1282 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1283 if (REG_NOTE_KIND (link) == REG_INC
1284 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1285 || (i1 != 0
1286 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1287 {
1288 undo_all ();
1289 return 0;
1290 }
1291 #endif
1292
1293 /* See if the SETs in I1 or I2 need to be kept around in the merged
1294 instruction: whenever the value set there is still needed past I3.
1295 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1296
1297 For the SET in I1, we have two cases: If I1 and I2 independently
1298 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1299 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1300 in I1 needs to be kept around unless I1DEST dies or is set in either
1301 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1302 I1DEST. If so, we know I1 feeds into I2. */
1303
1304 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1305
1306 added_sets_1
1307 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1308 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1309
1310 /* If the set in I2 needs to be kept around, we must make a copy of
1311 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1312 PATTERN (I2), we are only substituting for the original I1DEST, not into
1313 an already-substituted copy. This also prevents making self-referential
1314 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1315 I2DEST. */
1316
1317 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1318 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1319 : PATTERN (i2));
1320
1321 if (added_sets_2)
1322 i2pat = copy_rtx (i2pat);
1323
1324 combine_merges++;
1325
1326 /* Substitute in the latest insn for the regs set by the earlier ones. */
1327
1328 maxreg = max_reg_num ();
1329
1330 subst_insn = i3;
1331
1332 /* It is possible that the source of I2 or I1 may be performing an
1333 unneeded operation, such as a ZERO_EXTEND of something that is known
1334 to have the high part zero. Handle that case by letting subst look at
1335 the innermost one of them.
1336
1337 Another way to do this would be to have a function that tries to
1338 simplify a single insn instead of merging two or more insns. We don't
1339 do this because of the potential of infinite loops and because
1340 of the potential extra memory required. However, doing it the way
1341 we are is a bit of a kludge and doesn't catch all cases.
1342
1343 But only do this if -fexpensive-optimizations since it slows things down
1344 and doesn't usually win. */
1345
1346 if (flag_expensive_optimizations)
1347 {
1348 /* Pass pc_rtx so no substitutions are done, just simplifications.
1349 The cases that we are interested in here do not involve the few
1350 cases were is_replaced is checked. */
1351 if (i1)
1352 {
1353 subst_low_cuid = INSN_CUID (i1);
1354 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1355 }
1356 else
1357 {
1358 subst_low_cuid = INSN_CUID (i2);
1359 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1360 }
1361
1362 previous_num_undos = undobuf.num_undo;
1363 }
1364
1365 #ifndef HAVE_cc0
1366 /* Many machines that don't use CC0 have insns that can both perform an
1367 arithmetic operation and set the condition code. These operations will
1368 be represented as a PARALLEL with the first element of the vector
1369 being a COMPARE of an arithmetic operation with the constant zero.
1370 The second element of the vector will set some pseudo to the result
1371 of the same arithmetic operation. If we simplify the COMPARE, we won't
1372 match such a pattern and so will generate an extra insn. Here we test
1373 for this case, where both the comparison and the operation result are
1374 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1375 I2SRC. Later we will make the PARALLEL that contains I2. */
1376
1377 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1378 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1379 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1380 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1381 {
1382 rtx *cc_use;
1383 enum machine_mode compare_mode;
1384
1385 newpat = PATTERN (i3);
1386 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1387
1388 i2_is_used = 1;
1389
1390 #ifdef EXTRA_CC_MODES
1391 /* See if a COMPARE with the operand we substituted in should be done
1392 with the mode that is currently being used. If not, do the same
1393 processing we do in `subst' for a SET; namely, if the destination
1394 is used only once, try to replace it with a register of the proper
1395 mode and also replace the COMPARE. */
1396 if (undobuf.other_insn == 0
1397 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1398 &undobuf.other_insn))
1399 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1400 i2src, const0_rtx))
1401 != GET_MODE (SET_DEST (newpat))))
1402 {
1403 int regno = REGNO (SET_DEST (newpat));
1404 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1405
1406 if (regno < FIRST_PSEUDO_REGISTER
1407 || (reg_n_sets[regno] == 1 && ! added_sets_2
1408 && ! REG_USERVAR_P (SET_DEST (newpat))))
1409 {
1410 if (regno >= FIRST_PSEUDO_REGISTER)
1411 SUBST (regno_reg_rtx[regno], new_dest);
1412
1413 SUBST (SET_DEST (newpat), new_dest);
1414 SUBST (XEXP (*cc_use, 0), new_dest);
1415 SUBST (SET_SRC (newpat),
1416 gen_rtx_combine (COMPARE, compare_mode,
1417 i2src, const0_rtx));
1418 }
1419 else
1420 undobuf.other_insn = 0;
1421 }
1422 #endif
1423 }
1424 else
1425 #endif
1426 {
1427 n_occurrences = 0; /* `subst' counts here */
1428
1429 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1430 need to make a unique copy of I2SRC each time we substitute it
1431 to avoid self-referential rtl. */
1432
1433 subst_low_cuid = INSN_CUID (i2);
1434 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1435 ! i1_feeds_i3 && i1dest_in_i1src);
1436 previous_num_undos = undobuf.num_undo;
1437
1438 /* Record whether i2's body now appears within i3's body. */
1439 i2_is_used = n_occurrences;
1440 }
1441
1442 /* If we already got a failure, don't try to do more. Otherwise,
1443 try to substitute in I1 if we have it. */
1444
1445 if (i1 && GET_CODE (newpat) != CLOBBER)
1446 {
1447 /* Before we can do this substitution, we must redo the test done
1448 above (see detailed comments there) that ensures that I1DEST
1449 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1450
1451 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1452 0, NULL_PTR))
1453 {
1454 undo_all ();
1455 return 0;
1456 }
1457
1458 n_occurrences = 0;
1459 subst_low_cuid = INSN_CUID (i1);
1460 newpat = subst (newpat, i1dest, i1src, 0, 0);
1461 previous_num_undos = undobuf.num_undo;
1462 }
1463
1464 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1465 to count all the ways that I2SRC and I1SRC can be used. */
1466 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1467 && i2_is_used + added_sets_2 > 1)
1468 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1469 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1470 > 1))
1471 /* Fail if we tried to make a new register (we used to abort, but there's
1472 really no reason to). */
1473 || max_reg_num () != maxreg
1474 /* Fail if we couldn't do something and have a CLOBBER. */
1475 || GET_CODE (newpat) == CLOBBER)
1476 {
1477 undo_all ();
1478 return 0;
1479 }
1480
1481 /* If the actions of the earlier insns must be kept
1482 in addition to substituting them into the latest one,
1483 we must make a new PARALLEL for the latest insn
1484 to hold additional the SETs. */
1485
1486 if (added_sets_1 || added_sets_2)
1487 {
1488 combine_extras++;
1489
1490 if (GET_CODE (newpat) == PARALLEL)
1491 {
1492 rtvec old = XVEC (newpat, 0);
1493 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1494 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1495 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1496 sizeof (old->elem[0]) * old->num_elem);
1497 }
1498 else
1499 {
1500 rtx old = newpat;
1501 total_sets = 1 + added_sets_1 + added_sets_2;
1502 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1503 XVECEXP (newpat, 0, 0) = old;
1504 }
1505
1506 if (added_sets_1)
1507 XVECEXP (newpat, 0, --total_sets)
1508 = (GET_CODE (PATTERN (i1)) == PARALLEL
1509 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1510
1511 if (added_sets_2)
1512 {
1513 /* If there is no I1, use I2's body as is. We used to also not do
1514 the subst call below if I2 was substituted into I3,
1515 but that could lose a simplification. */
1516 if (i1 == 0)
1517 XVECEXP (newpat, 0, --total_sets) = i2pat;
1518 else
1519 /* See comment where i2pat is assigned. */
1520 XVECEXP (newpat, 0, --total_sets)
1521 = subst (i2pat, i1dest, i1src, 0, 0);
1522 }
1523 }
1524
1525 /* We come here when we are replacing a destination in I2 with the
1526 destination of I3. */
1527 validate_replacement:
1528
1529 /* Is the result of combination a valid instruction? */
1530 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1531
1532 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1533 the second SET's destination is a register that is unused. In that case,
1534 we just need the first SET. This can occur when simplifying a divmod
1535 insn. We *must* test for this case here because the code below that
1536 splits two independent SETs doesn't handle this case correctly when it
1537 updates the register status. Also check the case where the first
1538 SET's destination is unused. That would not cause incorrect code, but
1539 does cause an unneeded insn to remain. */
1540
1541 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1542 && XVECLEN (newpat, 0) == 2
1543 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1544 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1545 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1546 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1547 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1548 && asm_noperands (newpat) < 0)
1549 {
1550 newpat = XVECEXP (newpat, 0, 0);
1551 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1552 }
1553
1554 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1555 && XVECLEN (newpat, 0) == 2
1556 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1557 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1558 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1559 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1560 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1561 && asm_noperands (newpat) < 0)
1562 {
1563 newpat = XVECEXP (newpat, 0, 1);
1564 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1565 }
1566
1567 /* See if this is an XOR. If so, perhaps the problem is that the
1568 constant is out of range. Replace it with a complemented XOR with
1569 a complemented constant; it might be in range. */
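/* Illustrative example, not from the original source: in SImode,
   (set r0 (xor r1 (const_int -2))) is equivalent to
   (set r0 (not (xor r1 (const_int 1)))), since x ^ C == ~(x ^ ~C)
   and ~(-2) == 1.  If -2 is out of range for the target's xor
   immediate but 1 is not, the complemented form may match.  */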
1570
1571 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1572 && GET_CODE (SET_SRC (newpat)) == XOR
1573 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1574 && ((temp = simplify_unary_operation (NOT,
1575 GET_MODE (SET_SRC (newpat)),
1576 XEXP (SET_SRC (newpat), 1),
1577 GET_MODE (SET_SRC (newpat))))
1578 != 0))
1579 {
1580 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1581 rtx pat
1582 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1583 gen_unary (NOT, i_mode,
1584 gen_binary (XOR, i_mode,
1585 XEXP (SET_SRC (newpat), 0),
1586 temp)));
1587
1588 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1589 if (insn_code_number >= 0)
1590 newpat = pat;
1591 }
1592
1593 /* If we were combining three insns and the result is a simple SET
1594 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1595 insns. There are two ways to do this. It can be split using a
1596 machine-specific method (like when you have an addition of a large
1597 constant) or by combine in the function find_split_point. */
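/* Illustrative example, not from the original source: on a target
   whose add-immediate insn accepts only small constants,
   (set r0 (plus r1 (const_int 70000)))
   is not one valid insn, but it can be split into two, e.g.
   (set r2 (const_int 70000))
   (set r0 (plus r1 r2))
   where r2 is a scratch register such as I2DEST below.  */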
1598
1599 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1600 && asm_noperands (newpat) < 0)
1601 {
1602 rtx m_split, *split;
1603 rtx ni2dest = i2dest;
1604
1605 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1606 use I2DEST as a scratch register will help. In the latter case,
1607 convert I2DEST to the mode of the source of NEWPAT if we can. */
1608
1609 m_split = split_insns (newpat, i3);
1610
1611 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1612 inputs of NEWPAT. */
1613
1614 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1615 possible to try that as a scratch reg. This would require adding
1616 more code to make it work though. */
1617
1618 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1619 {
1620 /* If I2DEST is a hard register or the only use of a pseudo,
1621 we can change its mode. */
1622 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1623 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1624 && GET_CODE (i2dest) == REG
1625 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1626 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1627 && ! REG_USERVAR_P (i2dest))))
1628 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1629 REGNO (i2dest));
1630
1631 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1632 gen_rtvec (2, newpat,
1633 gen_rtx (CLOBBER,
1634 VOIDmode,
1635 ni2dest))),
1636 i3);
1637 }
1638
1639 if (m_split && GET_CODE (m_split) == SEQUENCE
1640 && XVECLEN (m_split, 0) == 2
1641 && (next_real_insn (i2) == i3
1642 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1643 INSN_CUID (i2))))
1644 {
1645 rtx i2set, i3set;
1646 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1647 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1648
1649 i3set = single_set (XVECEXP (m_split, 0, 1));
1650 i2set = single_set (XVECEXP (m_split, 0, 0));
1651
1652 /* In case we changed the mode of I2DEST, replace it in the
1653 pseudo-register table here. We can't do it above in case this
1654 code doesn't get executed and we do a split the other way. */
1655
1656 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1657 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1658
1659 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1660
1661 /* If I2 or I3 has multiple SETs, we won't know how to track
1662 register status, so don't use these insns. */
1663
1664 if (i2_code_number >= 0 && i2set && i3set)
1665 insn_code_number = recog_for_combine (&newi3pat, i3,
1666 &new_i3_notes);
1667
1668 if (insn_code_number >= 0)
1669 newpat = newi3pat;
1670
1671 /* It is possible that both insns now set the destination of I3.
1672 If so, we must show an extra use of it. */
1673
1674 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1675 && GET_CODE (SET_DEST (i2set)) == REG
1676 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1677 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1678 }
1679
1680 /* If we can split it and use I2DEST, go ahead and see if that
1681 helps things be recognized. Verify that none of the registers
1682 are set between I2 and I3. */
1683 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1684 #ifdef HAVE_cc0
1685 && GET_CODE (i2dest) == REG
1686 #endif
1687 /* We need I2DEST in the proper mode. If it is a hard register
1688 or the only use of a pseudo, we can change its mode. */
1689 && (GET_MODE (*split) == GET_MODE (i2dest)
1690 || GET_MODE (*split) == VOIDmode
1691 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1692 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1693 && ! REG_USERVAR_P (i2dest)))
1694 && (next_real_insn (i2) == i3
1695 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1696 /* We can't overwrite I2DEST if its value is still used by
1697 NEWPAT. */
1698 && ! reg_referenced_p (i2dest, newpat))
1699 {
1700 rtx newdest = i2dest;
1701
1702 /* Get NEWDEST as a register in the proper mode. We have already
1703 validated that we can do this. */
1704 if (GET_MODE (i2dest) != GET_MODE (*split)
1705 && GET_MODE (*split) != VOIDmode)
1706 {
1707 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1708
1709 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1710 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1711 }
1712
1713 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1714 an ASHIFT. This can occur if it was inside a PLUS and hence
1715 appeared to be a memory address. This is a kludge. */
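/* Illustrative example, not from the original source:
   (mult r1 (const_int 8)) becomes (ashift r1 (const_int 3)),
   since exact_log2 (8) == 3.  */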
1716 if (GET_CODE (*split) == MULT
1717 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1718 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1719 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
1720 XEXP (*split, 0), GEN_INT (i)));
1721
1722 #ifdef INSN_SCHEDULING
1723 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1724 be written as a ZERO_EXTEND. */
1725 if (GET_CODE (*split) == SUBREG
1726 && GET_CODE (SUBREG_REG (*split)) == MEM)
1727 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1728 XEXP (*split, 0)));
1729 #endif
1730
1731 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1732 SUBST (*split, newdest);
1733 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1734 if (i2_code_number >= 0)
1735 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1736 }
1737 }
1738
1739 /* Check for a case where we loaded from memory in a narrow mode and
1740 then sign extended it, but we need both registers. In that case,
1741 we have a PARALLEL with both loads from the same memory location.
1742 We can split this into a load from memory followed by a register-register
1743 copy. This saves at least one insn, more if register allocation can
1744 eliminate the copy. */
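/* Illustrative example, not from the original source:
   (parallel [(set r0 (sign_extend:SI (mem:HI addr)))
   (set r1 (mem:HI addr))])
   becomes the two insns
   (set r0 (sign_extend:SI (mem:HI addr)))
   (set r1 (subreg:HI r0 0))
   so the memory location is read only once.  */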
1745
1746 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1747 && GET_CODE (newpat) == PARALLEL
1748 && XVECLEN (newpat, 0) == 2
1749 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1750 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1751 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1752 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1753 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1754 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1755 INSN_CUID (i2))
1756 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1757 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1758 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1759 SET_SRC (XVECEXP (newpat, 0, 1)))
1760 && ! find_reg_note (i3, REG_UNUSED,
1761 SET_DEST (XVECEXP (newpat, 0, 0))))
1762 {
1763 rtx ni2dest;
1764
1765 newi2pat = XVECEXP (newpat, 0, 0);
1766 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1767 newpat = XVECEXP (newpat, 0, 1);
1768 SUBST (SET_SRC (newpat),
1769 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1770 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1771 if (i2_code_number >= 0)
1772 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1773
1774 if (insn_code_number >= 0)
1775 {
1776 rtx insn;
1777 rtx link;
1778
1779 /* If we will be able to accept this, we have made a change to the
1780 destination of I3. This can invalidate a LOG_LINKS entry pointing
1781 to I3. No other part of combine.c makes such a transformation.
1782
1783 The new I3 will have a destination that was previously the
1784 destination of I1 or I2 and which was used in I2 or I3. Call
1785 distribute_links to make a LOG_LINK from the next use of
1786 that destination. */
1787
1788 PATTERN (i3) = newpat;
1789 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1790
1791 /* I3 now uses what used to be its destination and which is
1792 now I2's destination. That means we need a LOG_LINK from
1793 I3 to I2. But we used to have one, so we still will.
1794
1795 However, some later insn might be using I2's dest and have
1796 a LOG_LINK pointing at I3. We must remove this link.
1797 The simplest way to remove the link is to point it at I1,
1798 which we know will be a NOTE. */
1799
1800 for (insn = NEXT_INSN (i3);
1801 insn && GET_CODE (insn) != CODE_LABEL
1802 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1803 insn = NEXT_INSN (insn))
1804 {
1805 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1806 && reg_referenced_p (ni2dest, PATTERN (insn)))
1807 {
1808 for (link = LOG_LINKS (insn); link;
1809 link = XEXP (link, 1))
1810 if (XEXP (link, 0) == i3)
1811 XEXP (link, 0) = i1;
1812
1813 break;
1814 }
1815 }
1816 }
1817 }
1818
1819 /* Similarly, check for a case where we have a PARALLEL of two independent
1820 SETs but we started with three insns. In this case, we can do the sets
1821 as two separate insns. This case occurs when some SET allows two
1822 other insns to combine, but the destination of that SET is still live. */
1823
1824 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1825 && GET_CODE (newpat) == PARALLEL
1826 && XVECLEN (newpat, 0) == 2
1827 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1828 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1829 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1830 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1831 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1832 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1833 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1834 INSN_CUID (i2))
1835 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1836 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1837 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1838 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1839 XVECEXP (newpat, 0, 0))
1840 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1841 XVECEXP (newpat, 0, 1)))
1842 {
1843 newi2pat = XVECEXP (newpat, 0, 1);
1844 newpat = XVECEXP (newpat, 0, 0);
1845
1846 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1847 if (i2_code_number >= 0)
1848 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1849 }
1850
1851 /* If it still isn't recognized, fail and change things back the way they
1852 were. */
1853 if ((insn_code_number < 0
1854 /* Is the result a reasonable ASM_OPERANDS? */
1855 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1856 {
1857 undo_all ();
1858 return 0;
1859 }
1860
1861 /* If we had to change another insn, make sure it is valid also. */
1862 if (undobuf.other_insn)
1863 {
1864 rtx other_notes = REG_NOTES (undobuf.other_insn);
1865 rtx other_pat = PATTERN (undobuf.other_insn);
1866 rtx new_other_notes;
1867 rtx note, next;
1868
1869 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1870 &new_other_notes);
1871
1872 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1873 {
1874 undo_all ();
1875 return 0;
1876 }
1877
1878 PATTERN (undobuf.other_insn) = other_pat;
1879
1880 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1881 are still valid. Then add any non-duplicate notes added by
1882 recog_for_combine. */
1883 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1884 {
1885 next = XEXP (note, 1);
1886
1887 if (REG_NOTE_KIND (note) == REG_UNUSED
1888 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1889 {
1890 if (GET_CODE (XEXP (note, 0)) == REG)
1891 reg_n_deaths[REGNO (XEXP (note, 0))]--;
1892
1893 remove_note (undobuf.other_insn, note);
1894 }
1895 }
1896
1897 for (note = new_other_notes; note; note = XEXP (note, 1))
1898 if (GET_CODE (XEXP (note, 0)) == REG)
1899 reg_n_deaths[REGNO (XEXP (note, 0))]++;
1900
1901 distribute_notes (new_other_notes, undobuf.other_insn,
1902 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
1903 }
1904
1905 /* We now know that we can do this combination. Merge the insns and
1906 update the status of registers and LOG_LINKS. */
1907
1908 {
1909 rtx i3notes, i2notes, i1notes = 0;
1910 rtx i3links, i2links, i1links = 0;
1911 rtx midnotes = 0;
1912 int all_adjacent = (next_real_insn (i2) == i3
1913 && (i1 == 0 || next_real_insn (i1) == i2));
1914 register int regno;
1915 /* Compute which registers we expect to eliminate. */
1916 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1917 ? 0 : i2dest);
1918 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1919
1920 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1921 clear them. */
1922 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1923 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1924 if (i1)
1925 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1926
1927 /* Ensure that we do not have something that should not be shared but
1928 occurs multiple times in the new insns. Check this by first
1929 resetting all the `used' flags and then copying anything that is shared. */
1930
1931 reset_used_flags (i3notes);
1932 reset_used_flags (i2notes);
1933 reset_used_flags (i1notes);
1934 reset_used_flags (newpat);
1935 reset_used_flags (newi2pat);
1936 if (undobuf.other_insn)
1937 reset_used_flags (PATTERN (undobuf.other_insn));
1938
1939 i3notes = copy_rtx_if_shared (i3notes);
1940 i2notes = copy_rtx_if_shared (i2notes);
1941 i1notes = copy_rtx_if_shared (i1notes);
1942 newpat = copy_rtx_if_shared (newpat);
1943 newi2pat = copy_rtx_if_shared (newi2pat);
1944 if (undobuf.other_insn)
1945 reset_used_flags (PATTERN (undobuf.other_insn));
1946
1947 INSN_CODE (i3) = insn_code_number;
1948 PATTERN (i3) = newpat;
1949 if (undobuf.other_insn)
1950 INSN_CODE (undobuf.other_insn) = other_code_number;
1951
1952 /* We had one special case above where I2 had more than one set and
1953 we replaced a destination of one of those sets with the destination
1954 of I3. In that case, we have to update LOG_LINKS of insns later
1955 in this basic block. Note that this (expensive) case is rare. */
1956
1957 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1958 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1959 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1960 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
1961 && ! find_reg_note (i2, REG_UNUSED,
1962 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
1963 {
1964 register rtx insn;
1965
1966 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
1967 {
1968 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1969 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1970 if (XEXP (link, 0) == i2)
1971 XEXP (link, 0) = i3;
1972
1973 if (GET_CODE (insn) == CODE_LABEL
1974 || GET_CODE (insn) == JUMP_INSN)
1975 break;
1976 }
1977 }
1978
1979 LOG_LINKS (i3) = 0;
1980 REG_NOTES (i3) = 0;
1981 LOG_LINKS (i2) = 0;
1982 REG_NOTES (i2) = 0;
1983
1984 if (newi2pat)
1985 {
1986 INSN_CODE (i2) = i2_code_number;
1987 PATTERN (i2) = newi2pat;
1988 }
1989 else
1990 {
1991 PUT_CODE (i2, NOTE);
1992 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
1993 NOTE_SOURCE_FILE (i2) = 0;
1994 }
1995
1996 if (i1)
1997 {
1998 LOG_LINKS (i1) = 0;
1999 REG_NOTES (i1) = 0;
2000 PUT_CODE (i1, NOTE);
2001 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2002 NOTE_SOURCE_FILE (i1) = 0;
2003 }
2004
2005 /* Get death notes for everything that is now used in either I3 or
2006 I2 and used to die in a previous insn. */
2007
2008 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2009 if (newi2pat)
2010 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2011
2012 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2013 if (i3notes)
2014 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2015 elim_i2, elim_i1);
2016 if (i2notes)
2017 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2018 elim_i2, elim_i1);
2019 if (i1notes)
2020 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2021 elim_i2, elim_i1);
2022 if (midnotes)
2023 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2024 elim_i2, elim_i1);
2025
2026 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2027 know these are REG_UNUSED and want them to go to the desired insn,
2028 so we always pass it as i3. We have not counted the notes in
2029 reg_n_deaths yet, so we need to do so now. */
2030
2031 if (newi2pat && new_i2_notes)
2032 {
2033 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2034 if (GET_CODE (XEXP (temp, 0)) == REG)
2035 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2036
2037 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2038 }
2039
2040 if (new_i3_notes)
2041 {
2042 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2043 if (GET_CODE (XEXP (temp, 0)) == REG)
2044 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2045
2046 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2047 }
2048
2049 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2050 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2051 Show an additional death due to the REG_DEAD note we make here. If
2052 we discard it in distribute_notes, we will decrement it again. */
2053
2054 if (i3dest_killed)
2055 {
2056 if (GET_CODE (i3dest_killed) == REG)
2057 reg_n_deaths[REGNO (i3dest_killed)]++;
2058
2059 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2060 NULL_RTX),
2061 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2062 NULL_RTX, NULL_RTX);
2063 }
2064
2065 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2066 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2067 we passed I3 in that case, it might delete I2. */
2068
2069 if (i2dest_in_i2src)
2070 {
2071 if (GET_CODE (i2dest) == REG)
2072 reg_n_deaths[REGNO (i2dest)]++;
2073
2074 if (newi2pat && reg_set_p (i2dest, newi2pat))
2075 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2076 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2077 else
2078 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2079 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2080 NULL_RTX, NULL_RTX);
2081 }
2082
2083 if (i1dest_in_i1src)
2084 {
2085 if (GET_CODE (i1dest) == REG)
2086 reg_n_deaths[REGNO (i1dest)]++;
2087
2088 if (newi2pat && reg_set_p (i1dest, newi2pat))
2089 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2090 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2091 else
2092 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2093 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2094 NULL_RTX, NULL_RTX);
2095 }
2096
2097 distribute_links (i3links);
2098 distribute_links (i2links);
2099 distribute_links (i1links);
2100
2101 if (GET_CODE (i2dest) == REG)
2102 {
2103 rtx link;
2104 rtx i2_insn = 0, i2_val = 0, set;
2105
2106 /* The insn that used to set this register doesn't exist, and
2107 this life of the register may not exist either. See if one of
2108 I3's links points to an insn that sets I2DEST. If it does,
2109 that is now the last known value for I2DEST. If we don't update
2110 this and I2 set the register to a value that depended on its old
2111 contents, we will get confused. If this insn is used, things
2112 will be set correctly in combine_instructions. */
2113
2114 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2115 if ((set = single_set (XEXP (link, 0))) != 0
2116 && rtx_equal_p (i2dest, SET_DEST (set)))
2117 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2118
2119 record_value_for_reg (i2dest, i2_insn, i2_val);
2120
2121 /* If the reg formerly set in I2 died only once and that was in I3,
2122 zero its use count so it won't make `reload' do any work. */
2123 if (! added_sets_2 && newi2pat == 0)
2124 {
2125 regno = REGNO (i2dest);
2126 reg_n_sets[regno]--;
2127 if (reg_n_sets[regno] == 0
2128 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2129 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2130 reg_n_refs[regno] = 0;
2131 }
2132 }
2133
2134 if (i1 && GET_CODE (i1dest) == REG)
2135 {
2136 rtx link;
2137 rtx i1_insn = 0, i1_val = 0, set;
2138
2139 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2140 if ((set = single_set (XEXP (link, 0))) != 0
2141 && rtx_equal_p (i1dest, SET_DEST (set)))
2142 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2143
2144 record_value_for_reg (i1dest, i1_insn, i1_val);
2145
2146 regno = REGNO (i1dest);
2147 if (! added_sets_1)
2148 {
2149 reg_n_sets[regno]--;
2150 if (reg_n_sets[regno] == 0
2151 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2152 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2153 reg_n_refs[regno] = 0;
2154 }
2155 }
2156
2157 /* Update reg_nonzero_bits et al for any changes that may have been made
2158 to this insn. */
2159
2160 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2161 if (newi2pat)
2162 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2163
2164 /* If I3 is now an unconditional jump, ensure that it has a
2165 BARRIER following it since it may have initially been a
2166 conditional jump. It may also be the last nonnote insn. */
2167
2168 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2169 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2170 || GET_CODE (temp) != BARRIER))
2171 emit_barrier_after (i3);
2172 }
2173
2174 combine_successes++;
2175
2176 return newi2pat ? i2 : i3;
2177 }
2178 \f
2179 /* Undo all the modifications recorded in undobuf. */
2180
2181 static void
2182 undo_all ()
2183 {
2184 register int i;
2185 if (undobuf.num_undo > MAX_UNDO)
2186 undobuf.num_undo = MAX_UNDO;
2187 for (i = undobuf.num_undo - 1; i >= 0; i--)
2188 {
2189 if (undobuf.undo[i].is_int)
2190 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2191 else
2192 *undobuf.undo[i].where.rtx = undobuf.undo[i].old_contents.rtx;
2193
2194 }
2195
2196 obfree (undobuf.storage);
2197 undobuf.num_undo = 0;
2198 }
2199 \f
2200 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2201 where we have an arithmetic expression and return that point. LOC will
2202 be inside INSN.
2203
2204 try_combine will call this function to see if an insn can be split into
2205 two insns. */
2206
2207 static rtx *
2208 find_split_point (loc, insn)
2209 rtx *loc;
2210 rtx insn;
2211 {
2212 rtx x = *loc;
2213 enum rtx_code code = GET_CODE (x);
2214 rtx *split;
2215 int len = 0, pos, unsignedp;
2216 rtx inner;
2217
2218 /* First special-case some codes. */
2219 switch (code)
2220 {
2221 case SUBREG:
2222 #ifdef INSN_SCHEDULING
2223 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2224 point. */
2225 if (GET_CODE (SUBREG_REG (x)) == MEM)
2226 return loc;
2227 #endif
2228 return find_split_point (&SUBREG_REG (x), insn);
2229
2230 case MEM:
2231 #ifdef HAVE_lo_sum
2232 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2233 using LO_SUM and HIGH. */
2234 if (GET_CODE (XEXP (x, 0)) == CONST
2235 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2236 {
2237 SUBST (XEXP (x, 0),
2238 gen_rtx_combine (LO_SUM, Pmode,
2239 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2240 XEXP (x, 0)));
2241 return &XEXP (XEXP (x, 0), 0);
2242 }
2243 #endif
2244
2245 /* If we have a PLUS whose second operand is a constant and the
2246 address is not valid, perhaps we can split it up using
2247 the machine-specific way to split large constants. We use
2248 the first pseudo-reg (one of the virtual regs) as a placeholder;
2249 it will not remain in the result. */
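/* Illustrative example, not from the original source: for
   (mem:SI (plus r1 (const_int 70000))) with an invalid offset, the
   splitter might produce
   (set placeholder (plus r1 (const_int 65536)))
   (set placeholder (plus placeholder (const_int 4464)))
   and we then rebuild the address as
   (plus (plus r1 (const_int 65536)) (const_int 4464))
   with the inner PLUS as the split point.  */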
2250 if (GET_CODE (XEXP (x, 0)) == PLUS
2251 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2252 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2253 {
2254 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2255 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2256 subst_insn);
2257
2258 /* This should have produced two insns, each of which sets our
2259 placeholder. If the source of the second is a valid address,
2260 we can put both sources together and make a split point
2261 in the middle. */
2262
2263 if (seq && XVECLEN (seq, 0) == 2
2264 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2265 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2266 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2267 && ! reg_mentioned_p (reg,
2268 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2269 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2270 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2271 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2272 && memory_address_p (GET_MODE (x),
2273 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2274 {
2275 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2276 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2277
2278 /* Replace the placeholder in SRC2 with SRC1. If we can
2279 find where in SRC2 it was placed, that can become our
2280 split point and we can replace this address with SRC2.
2281 Just try two obvious places. */
2282
2283 src2 = replace_rtx (src2, reg, src1);
2284 split = 0;
2285 if (XEXP (src2, 0) == src1)
2286 split = &XEXP (src2, 0);
2287 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2288 && XEXP (XEXP (src2, 0), 0) == src1)
2289 split = &XEXP (XEXP (src2, 0), 0);
2290
2291 if (split)
2292 {
2293 SUBST (XEXP (x, 0), src2);
2294 return split;
2295 }
2296 }
2297
2298 /* If that didn't work, perhaps the first operand is complex and
2299 needs to be computed separately, so make a split point there.
2300 This will occur on machines that just support REG + CONST
2301 and have a constant moved through some previous computation. */
2302
2303 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2304 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2305 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2306 == 'o')))
2307 return &XEXP (XEXP (x, 0), 0);
2308 }
2309 break;
2310
2311 case SET:
2312 #ifdef HAVE_cc0
2313 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2314 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2315 we need to put the operand into a register. So split at that
2316 point. */
2317
2318 if (SET_DEST (x) == cc0_rtx
2319 && GET_CODE (SET_SRC (x)) != COMPARE
2320 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2321 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2322 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2323 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2324 return &SET_SRC (x);
2325 #endif
2326
2327 /* See if we can split SET_SRC as it stands. */
2328 split = find_split_point (&SET_SRC (x), insn);
2329 if (split && split != &SET_SRC (x))
2330 return split;
2331
2332 /* See if this is a bitfield assignment with everything constant. If
2333 so, this is an IOR of an AND, so split it into that. */
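/* Worked example, not from the original source, assuming
   BITS_BIG_ENDIAN is zero: storing 5 into a 3-bit field at bit 4,
   (set (zero_extract r0 (const_int 3) (const_int 4)) (const_int 5))
   becomes
   (set r0 (ior (and r0 (const_int -113)) (const_int 80)))
   since the field mask is 7 << 4 == 112, ~112 == -113, and
   5 << 4 == 80.  */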
2334 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2335 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2336 <= HOST_BITS_PER_WIDE_INT)
2337 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2338 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2339 && GET_CODE (SET_SRC (x)) == CONST_INT
2340 && ((INTVAL (XEXP (SET_DEST (x), 1))
2341 + INTVAL (XEXP (SET_DEST (x), 2)))
2342 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2343 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2344 {
2345 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2346 int len = INTVAL (XEXP (SET_DEST (x), 1));
2347 int src = INTVAL (SET_SRC (x));
2348 rtx dest = XEXP (SET_DEST (x), 0);
2349 enum machine_mode mode = GET_MODE (dest);
2350 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2351
2352 #if BITS_BIG_ENDIAN
2353 pos = GET_MODE_BITSIZE (mode) - len - pos;
2354 #endif
2355
2356 if (src == mask)
2357 SUBST (SET_SRC (x),
2358 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2359 else
2360 SUBST (SET_SRC (x),
2361 gen_binary (IOR, mode,
2362 gen_binary (AND, mode, dest,
2363 GEN_INT (~ (mask << pos)
2364 & GET_MODE_MASK (mode))),
2365 GEN_INT (src << pos)));
2366
2367 SUBST (SET_DEST (x), dest);
2368
2369 split = find_split_point (&SET_SRC (x), insn);
2370 if (split && split != &SET_SRC (x))
2371 return split;
2372 }
2373
2374 /* Otherwise, see if this is an operation that we can split into two.
2375 If so, try to split that. */
2376 code = GET_CODE (SET_SRC (x));
2377
2378 switch (code)
2379 {
2380 case AND:
2381 /* If we are AND'ing with a large constant that is only a single
2382 bit and the result is only being used in a context where we
2383 need to know if it is zero or non-zero, replace it with a bit
2384 extraction. This will avoid the large constant, which might
2385 have taken more than one insn to make. If the constant were
2386 not a valid argument to the AND but took only one insn to make,
2387 this is no worse, but if it took more than one insn, it will
2388 be better. */
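/* Illustrative example, not from the original source: given
   (set r0 (and r1 (const_int 4096)))
   where the only use of r0 is in (ne r0 (const_int 0)), we replace
   the AND with
   (set r0 (zero_extract r1 (const_int 1) (const_int 12)))
   since exact_log2 (4096) == 12, avoiding the large constant.  */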
2389
2390 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2391 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2392 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2393 && GET_CODE (SET_DEST (x)) == REG
2394 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2395 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2396 && XEXP (*split, 0) == SET_DEST (x)
2397 && XEXP (*split, 1) == const0_rtx)
2398 {
2399 SUBST (SET_SRC (x),
2400 make_extraction (GET_MODE (SET_DEST (x)),
2401 XEXP (SET_SRC (x), 0),
2402 pos, NULL_RTX, 1, 1, 0, 0));
2403 return find_split_point (loc, insn);
2404 }
2405 break;
2406
2407 case SIGN_EXTEND:
2408 inner = XEXP (SET_SRC (x), 0);
2409 pos = 0;
2410 len = GET_MODE_BITSIZE (GET_MODE (inner));
2411 unsignedp = 0;
2412 break;
2413
2414 case SIGN_EXTRACT:
2415 case ZERO_EXTRACT:
2416 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2417 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2418 {
2419 inner = XEXP (SET_SRC (x), 0);
2420 len = INTVAL (XEXP (SET_SRC (x), 1));
2421 pos = INTVAL (XEXP (SET_SRC (x), 2));
2422
2423 #if BITS_BIG_ENDIAN
2424 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2425 #endif
2426 unsignedp = (code == ZERO_EXTRACT);
2427 }
2428 break;
2429 }
2430
2431 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2432 {
2433 enum machine_mode mode = GET_MODE (SET_SRC (x));
2434
2435 /* For unsigned, we have a choice of a shift followed by an
2436 AND or two shifts. Use two shifts for field sizes where the
2437 constant might be too large. We assume here that we can
2438 always at least get 8-bit constants in an AND insn, which is
2439 true for every current RISC. */
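/* Illustrative example, not from the original source: an unsigned
   4-bit field at bit 8 of r1 in SImode becomes
   (and (lshiftrt r1 (const_int 8)) (const_int 15)),
   while a signed field, or one wider than 8 bits, becomes
   (ashiftrt (ashift r1 (const_int 20)) (const_int 28)).  */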
2440
2441 if (unsignedp && len <= 8)
2442 {
2443 SUBST (SET_SRC (x),
2444 gen_rtx_combine
2445 (AND, mode,
2446 gen_rtx_combine (LSHIFTRT, mode,
2447 gen_lowpart_for_combine (mode, inner),
2448 GEN_INT (pos)),
2449 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2450
2451 split = find_split_point (&SET_SRC (x), insn);
2452 if (split && split != &SET_SRC (x))
2453 return split;
2454 }
2455 else
2456 {
2457 SUBST (SET_SRC (x),
2458 gen_rtx_combine
2459 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2460 gen_rtx_combine (ASHIFT, mode,
2461 gen_lowpart_for_combine (mode, inner),
2462 GEN_INT (GET_MODE_BITSIZE (mode)
2463 - len - pos)),
2464 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2465
2466 split = find_split_point (&SET_SRC (x), insn);
2467 if (split && split != &SET_SRC (x))
2468 return split;
2469 }
2470 }
2471
2472 /* See if this is a simple operation with a constant as the second
2473 operand. It might be that this constant is out of range and hence
2474 could be used as a split point. */
2475 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2476 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2477 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2478 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2479 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2480 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2481 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2482 == 'o'))))
2483 return &XEXP (SET_SRC (x), 1);
2484
2485 /* Finally, see if this is a simple operation with its first operand
2486 not in a register. The operation might require this operand in a
2487 register, so return it as a split point. We can always do this
2488 because if the first operand were another operation, we would have
2489 already found it as a split point. */
2490 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2491 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2492 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2493 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2494 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2495 return &XEXP (SET_SRC (x), 0);
2496
2497 return 0;
2498
2499 case AND:
2500 case IOR:
2501 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2502 it is better to write this as (not (ior A B)) so we can split it.
2503 Similarly for IOR. */
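/* Illustrative example, not from the original source:
   (and (not r1) (not r2)) is rewritten as (not (ior r1 r2)),
   letting the inner IOR become its own insn.  */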
2504 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2505 {
2506 SUBST (*loc,
2507 gen_rtx_combine (NOT, GET_MODE (x),
2508 gen_rtx_combine (code == IOR ? AND : IOR,
2509 GET_MODE (x),
2510 XEXP (XEXP (x, 0), 0),
2511 XEXP (XEXP (x, 1), 0))));
2512 return find_split_point (loc, insn);
2513 }
2514
2515 /* Many RISC machines have a large set of logical insns. If the
2516 second operand is a NOT, put it first so we will try to split the
2517 other operand first. */
2518 if (GET_CODE (XEXP (x, 1)) == NOT)
2519 {
2520 rtx tem = XEXP (x, 0);
2521 SUBST (XEXP (x, 0), XEXP (x, 1));
2522 SUBST (XEXP (x, 1), tem);
2523 }
2524 break;
2525 }
2526
2527 /* Otherwise, select our actions depending on our rtx class. */
2528 switch (GET_RTX_CLASS (code))
2529 {
2530 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2531 case '3':
2532 split = find_split_point (&XEXP (x, 2), insn);
2533 if (split)
2534 return split;
2535 /* ... fall through ... */
2536 case '2':
2537 case 'c':
2538 case '<':
2539 split = find_split_point (&XEXP (x, 1), insn);
2540 if (split)
2541 return split;
2542 /* ... fall through ... */
2543 case '1':
2544 /* Some machines have (and (shift ...) ...) insns. If X is not
2545 an AND, but XEXP (X, 0) is, use it as our split point. */
2546 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2547 return &XEXP (x, 0);
2548
2549 split = find_split_point (&XEXP (x, 0), insn);
2550 if (split)
2551 return split;
2552 return loc;
2553 }
2554
2555 /* Otherwise, we don't have a split point. */
2556 return 0;
2557 }
2558 \f
2559 /* Throughout X, replace FROM with TO, and return the result.
2560 The result is TO if X is FROM;
2561 otherwise the result is X, but its contents may have been modified.
2562 If they were modified, a record was made in undobuf so that
2563 undo_all will (among other things) return X to its original state.
2564
2565 If the number of changes necessary is too large to record for undo,
2566 the excess changes are not made, so the result is invalid.
2567 The changes already made can still be undone.
2568 undobuf.num_undo is incremented for such changes, so by testing that,
2569 the caller can tell whether the result is valid.
2570
2571 `n_occurrences' is incremented each time FROM is replaced.
2572
2573 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2574
2575 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2576 by copying if `n_occurrences' is non-zero. */
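/* Illustrative example, not from the original source: if I2 is
   (set r2 (mult r5 r6)) and I3 is (set r3 (plus r2 r4)), then
   subst (PATTERN (i3), i2dest, i2src, 0, 0) returns
   (set r3 (plus (mult r5 r6) r4)) and leaves n_occurrences == 1.  */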
2577
2578 static rtx
2579 subst (x, from, to, in_dest, unique_copy)
2580 register rtx x, from, to;
2581 int in_dest;
2582 int unique_copy;
2583 {
2584 register char *fmt;
2585 register int len, i;
2586 register enum rtx_code code = GET_CODE (x), orig_code = code;
2587 rtx temp;
2588 enum machine_mode mode = GET_MODE (x);
2589 enum machine_mode op0_mode = VOIDmode;
2590 rtx other_insn;
2591 rtx *cc_use;
2592 int n_restarts = 0;
2593
2594 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2595 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2596 If it is 0, that cannot be done. We can now do this for any MEM
2597 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2598 If not for that, MEMs would very rarely be safe. */
2599
2600 /* Reject MODEs bigger than a word, because we might not be able
2601 to reference a two-register group starting with an arbitrary register
2602 (and currently gen_lowpart might crash for a SUBREG). */
2603
2604 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2605 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2606
2607 /* Two expressions are equal if they are identical copies of a shared
2608 RTX or if they are both registers with the same register number
2609 and mode. */
2610
2611 #define COMBINE_RTX_EQUAL_P(X,Y) \
2612 ((X) == (Y) \
2613 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2614 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2615
2616 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2617 {
2618 n_occurrences++;
2619 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2620 }
2621
2622 /* If X and FROM are the same register but different modes, they will
2623 not have been seen as equal above. However, flow.c will make a
2624 LOG_LINKS entry for that case. If we do nothing, we will try to
2625 rerecognize our original insn and, when it succeeds, we will
2626 delete the feeding insn, which is incorrect.
2627
2628 So force this insn not to match in this (rare) case. */
2629 if (! in_dest && code == REG && GET_CODE (from) == REG
2630 && REGNO (x) == REGNO (from))
2631 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2632
2633 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2634 of which may contain things that can be combined. */
2635 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2636 return x;
2637
2638 /* It is possible to have a subexpression appear twice in the insn.
2639 Suppose that FROM is a register that appears within TO.
2640 Then, after that subexpression has been scanned once by `subst',
2641 the second time it is scanned, TO may be found. If we were
2642 to scan TO here, we would find FROM within it and create a
2643 self-referential rtl structure, which is completely wrong. */
2644 if (COMBINE_RTX_EQUAL_P (x, to))
2645 return to;
2646
2647 len = GET_RTX_LENGTH (code);
2648 fmt = GET_RTX_FORMAT (code);
2649
2650 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2651 set up to skip this common case. All other cases where we want to
2652 suppress replacing something inside a SET_SRC are handled via the
2653 IN_DEST operand. */
2654 if (code == SET
2655 && (GET_CODE (SET_DEST (x)) == REG
2656 || GET_CODE (SET_DEST (x)) == CC0
2657 || GET_CODE (SET_DEST (x)) == PC))
2658 fmt = "ie";
2659
2660 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2661 if (fmt[0] == 'e')
2662 op0_mode = GET_MODE (XEXP (x, 0));
2663
2664 for (i = 0; i < len; i++)
2665 {
2666 if (fmt[i] == 'E')
2667 {
2668 register int j;
2669 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2670 {
2671 register rtx new;
2672 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2673 {
2674 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2675 n_occurrences++;
2676 }
2677 else
2678 {
2679 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2680
2681 /* If this substitution failed, this whole thing fails. */
2682 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2683 return new;
2684 }
2685
2686 SUBST (XVECEXP (x, i, j), new);
2687 }
2688 }
2689 else if (fmt[i] == 'e')
2690 {
2691 register rtx new;
2692
2693 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2694 {
2695 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2696 n_occurrences++;
2697 }
2698 else
2699 /* If we are in a SET_DEST, suppress most cases unless we
2700 have gone inside a MEM, in which case we want to
2701 simplify the address. We assume here that things that
2702 are actually part of the destination have their inner
2703 parts in the first expression. This is true for SUBREG,
2704 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2705 things aside from REG and MEM that should appear in a
2706 SET_DEST. */
2707 new = subst (XEXP (x, i), from, to,
2708 (((in_dest
2709 && (code == SUBREG || code == STRICT_LOW_PART
2710 || code == ZERO_EXTRACT))
2711 || code == SET)
2712 && i == 0), unique_copy);
2713
2714 /* If we found that we will have to reject this combination,
2715 indicate that by returning the CLOBBER ourselves, rather than
2716 an expression containing it. This will speed things up as
2717 well as prevent accidents where two CLOBBERs are considered
2718 to be equal, thus producing an incorrect simplification. */
2719
2720 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2721 return new;
2722
2723 SUBST (XEXP (x, i), new);
2724 }
2725 }
2726
2727 /* We come back to here if we have replaced the expression with one of
2728 a different code and it is likely that further simplification will be
2729 possible. */
2730
2731 restart:
2732
2733 /* If we have restarted more than 4 times, we are probably looping, so
2734 give up. */
2735 if (++n_restarts > 4)
2736 return x;
2737
2738 /* If we are restarting at all, it means that we no longer know the
2739 original mode of operand 0 (since we have probably changed the
2740 form of X). */
2741
2742 if (n_restarts > 1)
2743 op0_mode = VOIDmode;
2744
2745 code = GET_CODE (x);
2746
2747 /* If this is a commutative operation, put a constant last and a complex
2748 expression first. We don't need to do this for comparisons here. */
2749 if (GET_RTX_CLASS (code) == 'c'
2750 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2751 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2752 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2753 || (GET_CODE (XEXP (x, 0)) == SUBREG
2754 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2755 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2756 {
2757 temp = XEXP (x, 0);
2758 SUBST (XEXP (x, 0), XEXP (x, 1));
2759 SUBST (XEXP (x, 1), temp);
2760 }
2761
2762 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2763 sign extension of a PLUS with a constant, reverse the order of the sign
2764 extension and the addition. Note that this is not the same as the original
2765 code, but overflow is undefined for signed values. Also note that the
2766 PLUS will have been partially moved "inside" the sign-extension, so that
2767 the first operand of X will really look like:
2768 (ashiftrt (plus (ashift A C4) C5) C4).
2769 We convert this to
2770 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
2771 and replace the first operand of X with that expression. Later parts
2772 of this function may simplify the expression further.
2773
2774 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2775 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2776 distributive law to produce (plus (mult (sign_extend A) C2) C3).
2777
2778 We do this to simplify address expressions. */
2779
2780 if ((code == PLUS || code == MINUS || code == MULT)
2781 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2782 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2783 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2784 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2785 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2786 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2787 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2788 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2789 XEXP (XEXP (XEXP (x, 0), 0), 1),
2790 XEXP (XEXP (x, 0), 1))) != 0)
2791 {
2792 rtx new
2793 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2794 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2795 INTVAL (XEXP (XEXP (x, 0), 1)));
2796
2797 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2798 INTVAL (XEXP (XEXP (x, 0), 1)));
2799
2800 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2801 }
2802
2803 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2804 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2805 things. Don't deal with operations that change modes here. */
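/* Illustrative example, not from the original source:
   (plus (if_then_else cond r1 r2) r3) becomes
   (if_then_else cond (plus r1 r3) (plus r2 r3)),
   and each arm may then simplify further.  */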
2806
2807 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2808 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2809 {
2810 /* Don't do this by using SUBST inside X since we might be messing
2811 up a shared expression. */
2812 rtx cond = XEXP (XEXP (x, 0), 0);
2813 rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2814 XEXP (x, 1)),
2815 pc_rtx, pc_rtx, 0, 0);
2816 rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2817 XEXP (x, 1)),
2818 pc_rtx, pc_rtx, 0, 0);
2819
2820
2821 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2822 goto restart;
2823 }
2824
2825 else if (GET_RTX_CLASS (code) == '1'
2826 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2827 && GET_MODE (XEXP (x, 0)) == mode)
2828 {
2829 rtx cond = XEXP (XEXP (x, 0), 0);
2830 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
2831 pc_rtx, pc_rtx, 0, 0);
2832 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
2833 pc_rtx, pc_rtx, 0, 0);
2834
2835 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2836 goto restart;
2837 }
2838
2839 /* Try to fold this expression in case we have constants that weren't
2840 present before. */
2841 temp = 0;
2842 switch (GET_RTX_CLASS (code))
2843 {
2844 case '1':
2845 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2846 break;
2847 case '<':
2848 temp = simplify_relational_operation (code, op0_mode,
2849 XEXP (x, 0), XEXP (x, 1));
2850 #ifdef FLOAT_STORE_FLAG_VALUE
2851 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2852 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2853 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2854 #endif
2855 break;
2856 case 'c':
2857 case '2':
2858 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2859 break;
2860 case 'b':
2861 case '3':
2862 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2863 XEXP (x, 1), XEXP (x, 2));
2864 break;
2865 }
2866
2867 if (temp)
2868 x = temp, code = GET_CODE (temp);
2869
2870 /* First see if we can apply the inverse distributive law. */
2871 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2872 {
2873 x = apply_distributive_law (x);
2874 code = GET_CODE (x);
2875 }
2876
2877 /* If CODE is an associative operation not otherwise handled, see if we
2878 can associate some operands. This can win if they are constants or
2879 if they are logically related (i.e. (a & b) & a). */
2880 if ((code == PLUS || code == MINUS
2881 || code == MULT || code == AND || code == IOR || code == XOR
2882 || code == DIV || code == UDIV
2883 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2884 && GET_MODE_CLASS (mode) == MODE_INT)
2885 {
2886 if (GET_CODE (XEXP (x, 0)) == code)
2887 {
2888 rtx other = XEXP (XEXP (x, 0), 0);
2889 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2890 rtx inner_op1 = XEXP (x, 1);
2891 rtx inner;
2892
2893 /* Make sure we pass the constant operand if any as the second
2894 one if this is a commutative operation. */
2895 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2896 {
2897 rtx tem = inner_op0;
2898 inner_op0 = inner_op1;
2899 inner_op1 = tem;
2900 }
2901 inner = simplify_binary_operation (code == MINUS ? PLUS
2902 : code == DIV ? MULT
2903 : code == UDIV ? MULT
2904 : code,
2905 mode, inner_op0, inner_op1);
2906
2907 /* For commutative operations, try the other pair if that one
2908 didn't simplify. */
2909 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2910 {
2911 other = XEXP (XEXP (x, 0), 1);
2912 inner = simplify_binary_operation (code, mode,
2913 XEXP (XEXP (x, 0), 0),
2914 XEXP (x, 1));
2915 }
2916
2917 if (inner)
2918 {
2919 x = gen_binary (code, mode, other, inner);
2920 goto restart;
2921
2922 }
2923 }
2924 }
2925
2926 /* A little bit of algebraic simplification here. */
2927 switch (code)
2928 {
2929 case MEM:
2930 /* Ensure that our address has any ASHIFTs converted to MULT in case
2931 address-recognizing predicates are called later. */
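/* Illustrative example, not from the original source: an address
   (plus r1 (ashift r2 (const_int 2))) is rewritten as
   (plus r1 (mult r2 (const_int 4))), the canonical form for
   addresses.  */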
2932 temp = make_compound_operation (XEXP (x, 0), MEM);
2933 SUBST (XEXP (x, 0), temp);
2934 break;
2935
2936 case SUBREG:
2937 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2938 is paradoxical. If we can't do that safely, then it becomes
2939 something nonsensical so that this combination won't take place. */
2940
2941 if (GET_CODE (SUBREG_REG (x)) == MEM
2942 && (GET_MODE_SIZE (mode)
2943 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2944 {
2945 rtx inner = SUBREG_REG (x);
2946 int endian_offset = 0;
2947 /* Don't change the mode of the MEM
2948 if that would change the meaning of the address. */
2949 if (MEM_VOLATILE_P (SUBREG_REG (x))
2950 || mode_dependent_address_p (XEXP (inner, 0)))
2951 return gen_rtx (CLOBBER, mode, const0_rtx);
2952
2953 #if BYTES_BIG_ENDIAN
2954 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2955 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2956 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2957 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2958 #endif
2959 /* Note if the plus_constant doesn't make a valid address
2960 then this combination won't be accepted. */
2961 x = gen_rtx (MEM, mode,
2962 plus_constant (XEXP (inner, 0),
2963 (SUBREG_WORD (x) * UNITS_PER_WORD
2964 + endian_offset)));
2965 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
2966 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
2967 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
2968 return x;
2969 }
2970
2971 /* If we are in a SET_DEST, these other cases can't apply. */
2972 if (in_dest)
2973 return x;
2974
2975 /* Changing mode twice with SUBREG => just change it once,
2976 or not at all if changing back to starting mode. */
2977 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
2978 {
2979 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
2980 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
2981 return SUBREG_REG (SUBREG_REG (x));
2982
2983 SUBST_INT (SUBREG_WORD (x),
2984 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
2985 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
2986 }
2987
2988 /* SUBREG of a hard register => just change the register number
2989 and/or mode. If the hard register is not valid in that mode,
2990 suppress this combination. If the hard register is the stack,
2991 frame, or argument pointer, leave this as a SUBREG. */
2992
2993 if (GET_CODE (SUBREG_REG (x)) == REG
2994 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
2995 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
2996 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2997 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
2998 #endif
2999 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3000 {
3001 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3002 mode))
3003 return gen_rtx (REG, mode,
3004 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3005 else
3006 return gen_rtx (CLOBBER, mode, const0_rtx);
3007 }
3008
3009 /* For a constant, try to pick up the part we want. Handle a full
3010 word and low-order part. Only do this if we are narrowing
3011 the constant; if it is being widened, we have no idea what
3012 the extra bits will have been set to. */
3013
3014 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3015 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3016 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3017 && GET_MODE_CLASS (mode) == MODE_INT)
3018 {
3019 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3020 0, op0_mode);
3021 if (temp)
3022 return temp;
3023 }
3024
3025 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3026 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
3027 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3028
3029 /* If we are narrowing the object, we need to see if we can simplify
3030 the expression for the object knowing that we only need the
3031 low-order bits. */
3032
3033 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
3034 && subreg_lowpart_p (x))
3035 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
3036 NULL_RTX);
3037 break;
3038
3039 case NOT:
3040 /* (not (plus X -1)) can become (neg X). */
3041 if (GET_CODE (XEXP (x, 0)) == PLUS
3042 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3043 {
3044 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3045 goto restart;
3046 }
3047
3048 /* Similarly, (not (neg X)) is (plus X -1). */
3049 if (GET_CODE (XEXP (x, 0)) == NEG)
3050 {
3051 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3052 goto restart;
3053 }
3054
3055 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3056 if (GET_CODE (XEXP (x, 0)) == XOR
3057 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3058 && (temp = simplify_unary_operation (NOT, mode,
3059 XEXP (XEXP (x, 0), 1),
3060 mode)) != 0)
3061 {
3062 SUBST (XEXP (XEXP (x, 0), 1), temp);
3063 return XEXP (x, 0);
3064 }
3065
3066 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3067 other than 1, but that is not valid. We could do a similar
3068 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3069 but this doesn't seem common enough to bother with. */
3070 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3071 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3072 {
3073 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
3074 XEXP (XEXP (x, 0), 1));
3075 goto restart;
3076 }
3077
3078 if (GET_CODE (XEXP (x, 0)) == SUBREG
3079 && subreg_lowpart_p (XEXP (x, 0))
3080 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3081 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3082 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3083 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3084 {
3085 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3086
3087 x = gen_rtx (ROTATE, inner_mode,
3088 gen_unary (NOT, inner_mode, const1_rtx),
3089 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3090 x = gen_lowpart_for_combine (mode, x);
3091 goto restart;
3092 }
3093
3094 #if STORE_FLAG_VALUE == -1
3095 /* (not (comparison foo bar)) can be done by reversing the comparison
3096 code if valid. */
3097 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3098 && reversible_comparison_p (XEXP (x, 0)))
3099 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3100 mode, XEXP (XEXP (x, 0), 0),
3101 XEXP (XEXP (x, 0), 1));
3102
3103 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3104 is (lt foo (const_int 0)), so we can perform the above
3105 simplification. */
3106
3107 if (GET_CODE (XEXP (x, 0)) == ASHIFTRT
3109 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3110 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3111 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3112 #endif
3113
3114 /* Apply De Morgan's laws to reduce number of patterns for machines
3115 with negating logical insns (and-not, nand, etc.). If result has
3116 only one NOT, put it first, since that is how the patterns are
3117 coded. */
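/* For example, (not (and A B)) becomes (ior (not A) (not B)) and
(not (ior A B)) becomes (and (not A) (not B)). A constant operand is
complemented on the spot: in QImode, (not (ior A (const_int 5)))
becomes (and (not A) (const_int 250)), since ~5 masked to eight bits
is 250. */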
3118
3119 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3120 {
3121 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3122
3123 if (GET_CODE (in1) == NOT)
3124 in1 = XEXP (in1, 0);
3125 else
3126 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3127
3128 if (GET_CODE (in2) == NOT)
3129 in2 = XEXP (in2, 0);
3130 else if (GET_CODE (in2) == CONST_INT
3131 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3132 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3133 else
3134 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3135
3136 if (GET_CODE (in2) == NOT)
3137 {
3138 rtx tem = in2;
3139 in2 = in1; in1 = tem;
3140 }
3141
3142 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3143 mode, in1, in2);
3144 goto restart;
3145 }
3146 break;
3147
3148 case NEG:
3149 /* (neg (plus X 1)) can become (not X). */
3150 if (GET_CODE (XEXP (x, 0)) == PLUS
3151 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3152 {
3153 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3154 goto restart;
3155 }
3156
3157 /* Similarly, (neg (not X)) is (plus X 1). */
3158 if (GET_CODE (XEXP (x, 0)) == NOT)
3159 {
3160 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
3161 goto restart;
3162 }
3163
3164 /* (neg (minus X Y)) can become (minus Y X). */
3165 if (GET_CODE (XEXP (x, 0)) == MINUS
3166 && (GET_MODE_CLASS (mode) != MODE_FLOAT
3167 /* x-y != -(y-x) with IEEE floating point. */
3168 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
3169 {
3170 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3171 XEXP (XEXP (x, 0), 0));
3172 goto restart;
3173 }
3174
3175 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3176 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3177 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3178 {
3179 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3180 goto restart;
3181 }
3182
3183 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3184 if we can then eliminate the NEG (e.g.,
3185 if the operand is a constant). */
3186
3187 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3188 {
3189 temp = simplify_unary_operation (NEG, mode,
3190 XEXP (XEXP (x, 0), 0), mode);
3191 if (temp)
3192 {
3193 SUBST (XEXP (XEXP (x, 0), 0), temp);
3194 return XEXP (x, 0);
3195 }
3196 }
3197
3198 temp = expand_compound_operation (XEXP (x, 0));
3199
3200 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3201 replaced by (lshiftrt X C). This will convert
3202 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3203
3204 if (GET_CODE (temp) == ASHIFTRT
3205 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3206 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3207 {
3208 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3209 INTVAL (XEXP (temp, 1)));
3210 goto restart;
3211 }
3212
3213 /* If X has only a single bit that might be nonzero, say, bit I, convert
3214 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3215 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3216 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3217 or a SUBREG of one since we'd be making the expression more
3218 complex if it was just a register. */
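/* For instance, in an 8-bit mode where only bit 2 of X can be nonzero,
X is 0 or 4. Then (ashift X 5) is 0 or 10000000, and the following
(ashiftrt ... 5) gives 0 or 11111100 == -4, which is (neg X) for both
possible values. */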
3219
3220 if (GET_CODE (temp) != REG
3221 && ! (GET_CODE (temp) == SUBREG
3222 && GET_CODE (SUBREG_REG (temp)) == REG)
3223 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3224 {
3225 rtx temp1 = simplify_shift_const
3226 (NULL_RTX, ASHIFTRT, mode,
3227 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3228 GET_MODE_BITSIZE (mode) - 1 - i),
3229 GET_MODE_BITSIZE (mode) - 1 - i);
3230
3231 /* If all we did was surround TEMP with the two shifts, we
3232 haven't improved anything, so don't use it. Otherwise,
3233 we are better off with TEMP1. */
3234 if (GET_CODE (temp1) != ASHIFTRT
3235 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3236 || XEXP (XEXP (temp1, 0), 0) != temp)
3237 {
3238 x = temp1;
3239 goto restart;
3240 }
3241 }
3242 break;
3243
3244 case FLOAT_TRUNCATE:
3245 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3246 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3247 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3248 return XEXP (XEXP (x, 0), 0);
3249 break;
3250
3251 #ifdef HAVE_cc0
3252 case COMPARE:
3253 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3254 using cc0, in which case we want to leave it as a COMPARE
3255 so we can distinguish it from a register-register-copy. */
3256 if (XEXP (x, 1) == const0_rtx)
3257 return XEXP (x, 0);
3258
3259 /* In IEEE floating point, x-0 is not the same as x. */
3260 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3261 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3262 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3263 return XEXP (x, 0);
3264 break;
3265 #endif
3266
3267 case CONST:
3268 /* (const (const X)) can become (const X). Do it this way rather than
3269 returning the inner CONST since CONST can be shared with a
3270 REG_EQUAL note. */
3271 if (GET_CODE (XEXP (x, 0)) == CONST)
3272 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3273 break;
3274
3275 #ifdef HAVE_lo_sum
3276 case LO_SUM:
3277 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3278 can add in an offset. find_split_point will split this address up
3279 again if it doesn't match. */
3280 if (GET_CODE (XEXP (x, 0)) == HIGH
3281 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3282 return XEXP (x, 1);
3283 break;
3284 #endif
3285
3286 case PLUS:
3287 /* If we have (plus (plus A const) B), associate it so that CONST is
3288 outermost, since that is the way indexed addresses are
3289 supposed to appear. This code used to check many more cases, but
3290 they are now checked elsewhere. */
3291 if (GET_CODE (XEXP (x, 0)) == PLUS
3292 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3293 return gen_binary (PLUS, mode,
3294 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3295 XEXP (x, 1)),
3296 XEXP (XEXP (x, 0), 1));
3297
3298 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3299 when c is (const_int pow2 / 2) is a sign extension of a
3300 bit-field and can be replaced by either a sign_extend or a
3301 sign_extract. The `and' may be a zero_extend. */
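/* For instance, for a four-bit field in an 8-bit mode the mask is 15
and c is 8, so ((x & 15) ^ 8) - 8 sign-extends the low nibble:
x == 1010 (binary) gives (10 ^ 8) - 8 == -6, the same value produced
by (ashiftrt (ashift x 4) 4). */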
3302 if (GET_CODE (XEXP (x, 0)) == XOR
3303 && GET_CODE (XEXP (x, 1)) == CONST_INT
3304 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3305 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3306 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3307 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3308 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3309 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3310 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3311 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3312 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3313 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3314 == i + 1))))
3315 {
3316 x = simplify_shift_const
3317 (NULL_RTX, ASHIFTRT, mode,
3318 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3319 XEXP (XEXP (XEXP (x, 0), 0), 0),
3320 GET_MODE_BITSIZE (mode) - (i + 1)),
3321 GET_MODE_BITSIZE (mode) - (i + 1));
3322 goto restart;
3323 }
3324
3325 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3326 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3327 the bitsize of the mode - 1. This allows simplification of
3328 "a = (b & 8) == 0;" */
3329 if (XEXP (x, 1) == constm1_rtx
3330 && GET_CODE (XEXP (x, 0)) != REG
3331 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3332 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3333 && nonzero_bits (XEXP (x, 0), mode) == 1)
3334 {
3335 x = simplify_shift_const
3336 (NULL_RTX, ASHIFTRT, mode,
3337 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3338 gen_rtx_combine (XOR, mode,
3339 XEXP (x, 0), const1_rtx),
3340 GET_MODE_BITSIZE (mode) - 1),
3341 GET_MODE_BITSIZE (mode) - 1);
3342 goto restart;
3343 }
3344
3345 /* If we are adding two things that have no bits in common, convert
3346 the addition into an IOR. This will often be further simplified,
3347 for example in cases like ((a & 1) + (a & 2)), which can
3348 become a & 3. */
3349
3350 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3351 && (nonzero_bits (XEXP (x, 0), mode)
3352 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3353 {
3354 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3355 goto restart;
3356 }
3357 break;
3358
3359 case MINUS:
3360 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3361 (and <foo> (const_int pow2-1)) */
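/* For example, (minus x (and x (const_int -8))) becomes
(and x (const_int 7)): the AND clears the low three bits of x, so the
subtraction leaves exactly those bits. */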
3362 if (GET_CODE (XEXP (x, 1)) == AND
3363 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3364 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3365 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3366 {
3367 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3368 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3369 goto restart;
3370 }
3371 break;
3372
3373 case MULT:
3374 /* If we have (mult (plus A B) C), apply the distributive law and then
3375 the inverse distributive law to see if things simplify. This
3376 occurs mostly in addresses, often when unrolling loops. */
3377
3378 if (GET_CODE (XEXP (x, 0)) == PLUS)
3379 {
3380 x = apply_distributive_law
3381 (gen_binary (PLUS, mode,
3382 gen_binary (MULT, mode,
3383 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3384 gen_binary (MULT, mode,
3385 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3386
3387 if (GET_CODE (x) != MULT)
3388 goto restart;
3389 }
3390
3391 /* If this is multiplication by a power of two and its first operand is
3392 a shift, treat the multiply as a shift to allow the shifts to
3393 possibly combine. */
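/* For example, (mult (lshiftrt A 3) (const_int 8)) is treated as
(ashift (lshiftrt A 3) 3), which simplify_shift_const may fold
further, e.g. into (and A (const_int -8)). */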
3394 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3395 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3396 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3397 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3398 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3399 || GET_CODE (XEXP (x, 0)) == ROTATE
3400 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3401 {
3402 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3403 goto restart;
3404 }
3405
3406 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3407 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3408 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3409 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3410 XEXP (XEXP (x, 0), 1));
3411 break;
3412
3413 case UDIV:
3414 /* If this is a divide by a power of two, treat it as a shift if
3415 its first operand is a shift. */
3416 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3417 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3418 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3419 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3420 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3421 || GET_CODE (XEXP (x, 0)) == ROTATE
3422 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3423 {
3424 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3425 goto restart;
3426 }
3427 break;
3428
3429 case EQ: case NE:
3430 case GT: case GTU: case GE: case GEU:
3431 case LT: case LTU: case LE: case LEU:
3432 /* If the first operand is a condition code, we can't do anything
3433 with it. */
3434 if (GET_CODE (XEXP (x, 0)) == COMPARE
3435 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3436 #ifdef HAVE_cc0
3437 && XEXP (x, 0) != cc0_rtx
3438 #endif
3439 ))
3440 {
3441 rtx op0 = XEXP (x, 0);
3442 rtx op1 = XEXP (x, 1);
3443 enum rtx_code new_code;
3444
3445 if (GET_CODE (op0) == COMPARE)
3446 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3447
3448 /* Simplify our comparison, if possible. */
3449 new_code = simplify_comparison (code, &op0, &op1);
3450
3451 #if STORE_FLAG_VALUE == 1
3452 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3453 if only the low-order bit is possibly nonzero in X (such as when
3454 X is a ZERO_EXTRACT of one bit). Similarly, we can convert
3455 EQ to (xor X 1). Remove any ZERO_EXTRACT we made when thinking
3456 this was a comparison. It may now be simpler to use, e.g., an
3457 AND. If a ZERO_EXTRACT is indeed appropriate, it will
3458 be placed back by the call to make_compound_operation in the
3459 SET case. */
3460 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3461 && op1 == const0_rtx
3462 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3463 return gen_lowpart_for_combine (mode,
3464 expand_compound_operation (op0));
3465 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3466 && op1 == const0_rtx
3467 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3468 {
3469 op0 = expand_compound_operation (op0);
3470
3471 x = gen_rtx_combine (XOR, mode,
3472 gen_lowpart_for_combine (mode, op0),
3473 const1_rtx);
3474 goto restart;
3475 }
3476 #endif
3477
3478 #if STORE_FLAG_VALUE == -1
3479 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3480 to (neg x) if only the low-order bit of X can be nonzero.
3481 This converts (ne (zero_extract X 1 Y) 0) to
3482 (sign_extract X 1 Y). */
3483 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3484 && op1 == const0_rtx
3485 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3486 {
3487 op0 = expand_compound_operation (op0);
3488 x = gen_rtx_combine (NEG, mode,
3489 gen_lowpart_for_combine (mode, op0));
3490 goto restart;
3491 }
3492 #endif
3493
3494 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3495 one bit that might be nonzero, we can convert (ne x 0) to
3496 (ashift x c) where C puts the bit in the sign bit. Remove any
3497 AND with STORE_FLAG_VALUE when we are done, since we are only
3498 going to test the sign bit. */
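/* For instance, in a 32-bit mode with STORE_FLAG_VALUE equal to the
sign bit, if only bit 3 of X can be nonzero then (ne x 0) becomes
(ashift x 28): the lone bit lands in the sign position, giving 0 or
STORE_FLAG_VALUE. */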
3499 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3500 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3501 && (STORE_FLAG_VALUE
3502 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3503 && op1 == const0_rtx
3504 && mode == GET_MODE (op0)
3505 && (i = exact_log2 (nonzero_bits (op0, GET_MODE (op0)))) >= 0)
3506 {
3507 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3508 expand_compound_operation (op0),
3509 GET_MODE_BITSIZE (mode) - 1 - i);
3510 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3511 return XEXP (x, 0);
3512 else
3513 return x;
3514 }
3515
3516 /* If the code changed, return a whole new comparison. */
3517 if (new_code != code)
3518 return gen_rtx_combine (new_code, mode, op0, op1);
3519
3520 /* Otherwise, keep this operation, but maybe change its operands.
3521 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3522 SUBST (XEXP (x, 0), op0);
3523 SUBST (XEXP (x, 1), op1);
3524 }
3525 break;
3526
3527 case IF_THEN_ELSE:
3528 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3529 used in it is being compared against certain values. Get the
3530 true and false comparisons and see if that says anything about the
3531 value of each arm. */
3532
3533 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3534 && reversible_comparison_p (XEXP (x, 0))
3535 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3536 {
3537 HOST_WIDE_INT nzb;
3538 rtx from = XEXP (XEXP (x, 0), 0);
3539 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3540 enum rtx_code false_code = reverse_condition (true_code);
3541 rtx true_val = XEXP (XEXP (x, 0), 1);
3542 rtx false_val = true_val;
3543 rtx true_arm = XEXP (x, 1);
3544 rtx false_arm = XEXP (x, 2);
3545 int swapped = 0;
3546
3547 /* If FALSE_CODE is EQ, swap the codes and arms. */
3548
3549 if (false_code == EQ)
3550 {
3551 swapped = 1, true_code = EQ, false_code = NE;
3552 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3553 }
3554
3555 /* If we are comparing against zero and the expression being tested
3556 has only a single bit that might be nonzero, that is its value
3557 when it is not equal to zero. Similarly if it is known to be
3558 -1 or 0. */
3559
3560 if (true_code == EQ && true_val == const0_rtx
3561 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3562 false_code = EQ, false_val = GEN_INT (nzb);
3563 else if (true_code == EQ && true_val == const0_rtx
3564 && (num_sign_bit_copies (from, GET_MODE (from))
3565 == GET_MODE_BITSIZE (GET_MODE (from))))
3566 false_code = EQ, false_val = constm1_rtx;
3567
3568 /* Now simplify an arm if we know the value of the register
3569 in the branch and it is used in the arm. Be careful due to
3570 the potential of locally-shared RTL. */
3571
3572 if (reg_mentioned_p (from, true_arm))
3573 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3574 from, true_val),
3575 pc_rtx, pc_rtx, 0, 0);
3576 if (reg_mentioned_p (from, false_arm))
3577 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3578 from, false_val),
3579 pc_rtx, pc_rtx, 0, 0);
3580
3581 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3582 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
3583 }
3584
3585 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3586 reversed, do so to avoid needing two sets of patterns for
3587 subtract-and-branch insns. Similarly if we have a constant in that
3588 position or if the third operand is the same as the first operand
3589 of the comparison. */
3590
3591 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3592 && reversible_comparison_p (XEXP (x, 0))
3593 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3594 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
3595 {
3596 SUBST (XEXP (x, 0),
3597 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3598 GET_MODE (XEXP (x, 0)),
3599 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3600
3601 temp = XEXP (x, 1);
3602 SUBST (XEXP (x, 1), XEXP (x, 2));
3603 SUBST (XEXP (x, 2), temp);
3604 }
3605
3606 /* If the two arms are identical, we don't need the comparison. */
3607
3608 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3609 && ! side_effects_p (XEXP (x, 0)))
3610 return XEXP (x, 1);
3611
3612 /* Look for cases where we have (abs x) or (neg (abs X)). */
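/* For example, (if_then_else (ge A (const_int 0)) A (neg A)) becomes
(abs A); with LT or LE as the condition it becomes (neg (abs A)). */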
3613
3614 if (GET_MODE_CLASS (mode) == MODE_INT
3615 && GET_CODE (XEXP (x, 2)) == NEG
3616 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3617 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3618 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3619 && ! side_effects_p (XEXP (x, 1)))
3620 switch (GET_CODE (XEXP (x, 0)))
3621 {
3622 case GT:
3623 case GE:
3624 x = gen_unary (ABS, mode, XEXP (x, 1));
3625 goto restart;
3626 case LT:
3627 case LE:
3628 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3629 goto restart;
3630 }
3631
3632 /* Look for MIN or MAX. */
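/* For example, (if_then_else (gt A B) A B) becomes (smax A B) and
(if_then_else (ltu A B) A B) becomes (umin A B). */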
3633
3634 if (GET_MODE_CLASS (mode) == MODE_INT
3635 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3636 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3637 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3638 && ! side_effects_p (XEXP (x, 0)))
3639 switch (GET_CODE (XEXP (x, 0)))
3640 {
3641 case GE:
3642 case GT:
3643 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3644 goto restart;
3645 case LE:
3646 case LT:
3647 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3648 goto restart;
3649 case GEU:
3650 case GTU:
3651 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3652 goto restart;
3653 case LEU:
3654 case LTU:
3655 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3656 goto restart;
3657 }
3658
3659 /* If we have something like (if_then_else (ne A 0) (OP X C) X),
3660 A is known to be either 0 or 1, and OP is an identity when its
3661 second operand is zero, this can be done as (OP X (mult A C)).
3662 Similarly if A is known to be 0 or -1 and also similarly if we have
3663 a ZERO_EXTEND or SIGN_EXTEND as long as X is already extended (so
3664 we don't destroy it). */
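/* For example, if A is known to be 0 or 1,
(if_then_else (ne A 0) (plus X C) X) becomes (plus X (mult A C)):
the multiplication yields C when A is 1 and 0 when A is 0, and PLUS
is an identity when its second operand is zero. */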
3665
3666 if (mode != VOIDmode
3667 && (GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3668 && XEXP (XEXP (x, 0), 1) == const0_rtx
3669 && (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3670 || (num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
3671 == GET_MODE_BITSIZE (mode))))
3672 {
3673 rtx nz = make_compound_operation (GET_CODE (XEXP (x, 0)) == NE
3674 ? XEXP (x, 1) : XEXP (x, 2));
3675 rtx z = GET_CODE (XEXP (x, 0)) == NE ? XEXP (x, 2) : XEXP (x, 1);
3676 rtx dir = (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3677 ? const1_rtx : constm1_rtx);
3678 rtx c = 0;
3679 enum machine_mode m = mode;
3680 enum rtx_code op, extend_op = 0;
3681
3682 if ((GET_CODE (nz) == PLUS || GET_CODE (nz) == MINUS
3683 || GET_CODE (nz) == IOR || GET_CODE (nz) == XOR
3684 || GET_CODE (nz) == ASHIFT
3685 || GET_CODE (nz) == LSHIFTRT || GET_CODE (nz) == ASHIFTRT)
3686 && rtx_equal_p (XEXP (nz, 0), z))
3687 c = XEXP (nz, 1), op = GET_CODE (nz);
3688 else if (GET_CODE (nz) == SIGN_EXTEND
3689 && (GET_CODE (XEXP (nz, 0)) == PLUS
3690 || GET_CODE (XEXP (nz, 0)) == MINUS
3691 || GET_CODE (XEXP (nz, 0)) == IOR
3692 || GET_CODE (XEXP (nz, 0)) == XOR
3693 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3694 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3695 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3696 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3697 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3698 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3699 && (num_sign_bit_copies (z, GET_MODE (z))
3700 >= (GET_MODE_BITSIZE (mode)
3701 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (nz, 0), 0))))))
3702 {
3703 c = XEXP (XEXP (nz, 0), 1);
3704 op = GET_CODE (XEXP (nz, 0));
3705 extend_op = SIGN_EXTEND;
3706 m = GET_MODE (XEXP (nz, 0));
3707 }
3708 else if (GET_CODE (nz) == ZERO_EXTEND
3709 && (GET_CODE (XEXP (nz, 0)) == PLUS
3710 || GET_CODE (XEXP (nz, 0)) == MINUS
3711 || GET_CODE (XEXP (nz, 0)) == IOR
3712 || GET_CODE (XEXP (nz, 0)) == XOR
3713 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3714 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3715 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3716 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3717 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3718 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3719 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3720 && ((nonzero_bits (z, GET_MODE (z))
3721 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (nz, 0), 0))))
3722 == 0))
3723 {
3724 c = XEXP (XEXP (nz, 0), 1);
3725 op = GET_CODE (XEXP (nz, 0));
3726 extend_op = ZERO_EXTEND;
3727 m = GET_MODE (XEXP (nz, 0));
3728 }
3729
3730 if (c && ! side_effects_p (c) && ! side_effects_p (z))
3731 {
3732 temp
3733 = gen_binary (MULT, m,
3734 gen_lowpart_for_combine (m,
3735 XEXP (XEXP (x, 0), 0)),
3736 gen_binary (MULT, m, c, dir));
3737
3738 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
3739
3740 if (extend_op != 0)
3741 temp = gen_unary (extend_op, mode, temp);
3742
3743 return temp;
3744 }
3745 }
3746 break;
3747
3748 case ZERO_EXTRACT:
3749 case SIGN_EXTRACT:
3750 case ZERO_EXTEND:
3751 case SIGN_EXTEND:
3752 /* If we are processing SET_DEST, we are done. */
3753 if (in_dest)
3754 return x;
3755
3756 x = expand_compound_operation (x);
3757 if (GET_CODE (x) != code)
3758 goto restart;
3759 break;
3760
3761 case SET:
3762 /* (set (pc) (return)) gets written as (return). */
3763 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3764 return SET_SRC (x);
3765
3766 /* Convert this into a field assignment operation, if possible. */
3767 x = make_field_assignment (x);
3768
3769 /* If we are setting CC0 or if the source is a COMPARE, look for the
3770 use of the comparison result and try to simplify it unless we already
3771 have used undobuf.other_insn. */
3772 if ((GET_CODE (SET_SRC (x)) == COMPARE
3773 #ifdef HAVE_cc0
3774 || SET_DEST (x) == cc0_rtx
3775 #endif
3776 )
3777 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3778 &other_insn)) != 0
3779 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3780 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3781 && XEXP (*cc_use, 0) == SET_DEST (x))
3782 {
3783 enum rtx_code old_code = GET_CODE (*cc_use);
3784 enum rtx_code new_code;
3785 rtx op0, op1;
3786 int other_changed = 0;
3787 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3788
3789 if (GET_CODE (SET_SRC (x)) == COMPARE)
3790 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3791 else
3792 op0 = SET_SRC (x), op1 = const0_rtx;
3793
3794 /* Simplify our comparison, if possible. */
3795 new_code = simplify_comparison (old_code, &op0, &op1);
3796
3797 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3798 /* If this machine has CC modes other than CCmode, check to see
3799 if we need to use a different CC mode here. */
3800 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3801
3802 /* If the mode changed, we have to change SET_DEST, the mode
3803 in the compare, and the mode in the place SET_DEST is used.
3804 If SET_DEST is a hard register, just build new versions with
3805 the proper mode. If it is a pseudo, we lose unless this is the only
3806 time we set the pseudo, in which case we can safely change
3807 its mode. */
3808 if (compare_mode != GET_MODE (SET_DEST (x)))
3809 {
3810 int regno = REGNO (SET_DEST (x));
3811 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3812
3813 if (regno < FIRST_PSEUDO_REGISTER
3814 || (reg_n_sets[regno] == 1
3815 && ! REG_USERVAR_P (SET_DEST (x))))
3816 {
3817 if (regno >= FIRST_PSEUDO_REGISTER)
3818 SUBST (regno_reg_rtx[regno], new_dest);
3819
3820 SUBST (SET_DEST (x), new_dest);
3821 SUBST (XEXP (*cc_use, 0), new_dest);
3822 other_changed = 1;
3823 }
3824 }
3825 #endif
3826
3827 /* If the code changed, we have to build a new comparison
3828 in undobuf.other_insn. */
3829 if (new_code != old_code)
3830 {
3831 unsigned HOST_WIDE_INT mask;
3832
3833 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3834 SET_DEST (x), const0_rtx));
3835
3836 /* If the only change we made was to change an EQ into an
3837 NE or vice versa, OP0 has only one bit that might be nonzero,
3838 and OP1 is zero, check if changing the user of the condition
3839 code will produce a valid insn. If it won't, we can keep
3840 the original code in that insn by surrounding our operation
3841 with an XOR. */
3842
3843 if (((old_code == NE && new_code == EQ)
3844 || (old_code == EQ && new_code == NE))
3845 && ! other_changed && op1 == const0_rtx
3846 && (GET_MODE_BITSIZE (GET_MODE (op0))
3847 <= HOST_BITS_PER_WIDE_INT)
3848 && (exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0)))
3849 >= 0))
3850 {
3851 rtx pat = PATTERN (other_insn), note = 0;
3852
3853 if ((recog_for_combine (&pat, other_insn, &note) < 0
3854 && ! check_asm_operands (pat)))
3855 {
3856 PUT_CODE (*cc_use, old_code);
3857 other_insn = 0;
3858
3859 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3860 GEN_INT (mask));
3861 }
3862 }
3863
3864 other_changed = 1;
3865 }
3866
3867 if (other_changed)
3868 undobuf.other_insn = other_insn;
3869
3870 #ifdef HAVE_cc0
3871 /* If we are now comparing against zero, change our source if
3872 needed. If we do not use cc0, we always have a COMPARE. */
3873 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3874 SUBST (SET_SRC (x), op0);
3875 else
3876 #endif
3877
3878 /* Otherwise, if we didn't previously have a COMPARE in the
3879 correct mode, we need one. */
3880 if (GET_CODE (SET_SRC (x)) != COMPARE
3881 || GET_MODE (SET_SRC (x)) != compare_mode)
3882 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3883 op0, op1));
3884 else
3885 {
3886 /* Otherwise, update the COMPARE if needed. */
3887 SUBST (XEXP (SET_SRC (x), 0), op0);
3888 SUBST (XEXP (SET_SRC (x), 1), op1);
3889 }
3890 }
3891 else
3892 {
3893 /* Get SET_SRC in a form where we have placed back any
3894 compound expressions. Then do the checks below. */
3895 temp = make_compound_operation (SET_SRC (x), SET);
3896 SUBST (SET_SRC (x), temp);
3897 }
3898
3899 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3900 operation, and X being a REG or (subreg (reg)), we may be able to
3901 convert this to (set (subreg:m2 x) (op)).
3902
3903 We can always do this if M1 is narrower than M2 because that
3904 means that we only care about the low bits of the result.
3905
3906 However, on most machines (those with neither BYTE_LOADS_ZERO_EXTEND
3907 nor BYTE_LOADS_SIGN_EXTEND defined), we cannot perform a
3908 narrower operation than requested since the high-order bits will
3909 be undefined. On machines where BYTE_LOADS_*_EXTEND is defined,
3910 however, this transformation is safe as long as M1 and M2 have
3911 the same number of words. */
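/* For example, (set x:QI (subreg:QI (plus:SI a b) 0)) can become
(set (subreg:SI x 0) (plus:SI a b)), since QImode is narrower than
SImode and only the low byte of the sum matters. */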
3912
3913 if (GET_CODE (SET_SRC (x)) == SUBREG
3914 && subreg_lowpart_p (SET_SRC (x))
3915 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3916 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3917 / UNITS_PER_WORD)
3918 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3919 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3920 #ifndef BYTE_LOADS_EXTEND
3921 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3922 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3923 #endif
3924 && (GET_CODE (SET_DEST (x)) == REG
3925 || (GET_CODE (SET_DEST (x)) == SUBREG
3926 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3927 {
3928 SUBST (SET_DEST (x),
3929 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
3930 SET_DEST (x)));
3931 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3932 }
3933
3934 #ifdef BYTE_LOADS_EXTEND
3935 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3936 M wider than N, this would require a paradoxical subreg.
3937 Replace the subreg with a zero_extend to avoid the reload that
3938 would otherwise be required. */
3939
3940 if (GET_CODE (SET_SRC (x)) == SUBREG
3941 && subreg_lowpart_p (SET_SRC (x))
3942 && SUBREG_WORD (SET_SRC (x)) == 0
3943 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3944 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3945 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
3946 SUBST (SET_SRC (x), gen_rtx_combine (LOAD_EXTEND,
3947 GET_MODE (SET_SRC (x)),
3948 XEXP (SET_SRC (x), 0)));
3949 #endif
3950
3951 #ifndef HAVE_conditional_move
3952
3953 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
3954 and we are comparing an item known to be 0 or -1 against 0, use a
3955 logical operation instead. Check for one of the arms being an IOR
3956 of the other arm with some value. We compute three terms to be
3957 IOR'ed together. In practice, at most two will be nonzero. Then
3958 we do the IOR's. */
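/* The underlying identity: when A is known to be 0 or -1,
(if_then_else (ne A 0) B C) equals (ior (and A B) (and (not A) C)),
because A is either all zeros or all ones. */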
3959
3960 if (GET_CODE (SET_DEST (x)) != PC
3961 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
3962 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
3963 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
3964 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
3965 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
3966 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
3967 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
3968 && ! side_effects_p (SET_SRC (x)))
3969 {
3970 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3971 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
3972 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3973 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
3974 rtx term1 = const0_rtx, term2, term3;
3975
3976 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
3977 term1 = false, true = XEXP (true, 1), false = const0_rtx;
3978 else if (GET_CODE (true) == IOR
3979 && rtx_equal_p (XEXP (true, 1), false))
3980 term1 = false, true = XEXP (true, 0), false = const0_rtx;
3981 else if (GET_CODE (false) == IOR
3982 && rtx_equal_p (XEXP (false, 0), true))
3983 term1 = true, false = XEXP (false, 1), true = const0_rtx;
3984 else if (GET_CODE (false) == IOR
3985 && rtx_equal_p (XEXP (false, 1), true))
3986 term1 = true, false = XEXP (false, 0), true = const0_rtx;
3987
3988 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3989 XEXP (XEXP (SET_SRC (x), 0), 0), true);
3990 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3991 gen_unary (NOT, GET_MODE (SET_SRC (x)),
3992 XEXP (XEXP (SET_SRC (x), 0), 0)),
3993 false);
3994
3995 SUBST (SET_SRC (x),
3996 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3997 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3998 term1, term2),
3999 term3));
4000 }
4001 #endif
4002 break;
4003
4004 case AND:
4005 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4006 {
4007 x = simplify_and_const_int (x, mode, XEXP (x, 0),
4008 INTVAL (XEXP (x, 1)));
4009
4010 /* If we have (ior (and X C1) C2) and the next restart would be
4011 the last, simplify this by making C1 as small as possible
4012 and then exit. */
4013 if (n_restarts >= 3 && GET_CODE (x) == IOR
4014 && GET_CODE (XEXP (x, 0)) == AND
4015 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4016 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4017 {
4018 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
4019 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
4020 & ~ INTVAL (XEXP (x, 1))));
4021 return gen_binary (IOR, mode, temp, XEXP (x, 1));
4022 }
4023
4024 if (GET_CODE (x) != AND)
4025 goto restart;
4026 }
4027
4028 /* Convert (A | B) & A to A. */
4029 if (GET_CODE (XEXP (x, 0)) == IOR
4030 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4031 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4032 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4033 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4034 return XEXP (x, 1);
4035
4036 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4037 insn (and may simplify more). */
4038 else if (GET_CODE (XEXP (x, 0)) == XOR
4039 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4040 && ! side_effects_p (XEXP (x, 1)))
4041 {
4042 x = gen_binary (AND, mode,
4043 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4044 XEXP (x, 1));
4045 goto restart;
4046 }
4047 else if (GET_CODE (XEXP (x, 0)) == XOR
4048 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4049 && ! side_effects_p (XEXP (x, 1)))
4050 {
4051 x = gen_binary (AND, mode,
4052 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4053 XEXP (x, 1));
4054 goto restart;
4055 }
4056
4057 /* Similarly for (~ (A ^ B)) & A. */
4058 else if (GET_CODE (XEXP (x, 0)) == NOT
4059 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4060 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
4061 && ! side_effects_p (XEXP (x, 1)))
4062 {
4063 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
4064 XEXP (x, 1));
4065 goto restart;
4066 }
4067 else if (GET_CODE (XEXP (x, 0)) == NOT
4068 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4069 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
4070 && ! side_effects_p (XEXP (x, 1)))
4071 {
4072 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
4073 XEXP (x, 1));
4074 goto restart;
4075 }
4076
4077 /* If we have (and A B) with A not an object but that is known to
4078 be -1 or 0, this is equivalent to the expression
4079 (if_then_else (ne A (const_int 0)) B (const_int 0))
4080 We make this conversion because it may allow further
4081 simplifications and then allow use of conditional move insns.
4082 If the machine doesn't have condition moves, code in case SET
4083 will convert the IF_THEN_ELSE back to the logical operation.
4084 We build the IF_THEN_ELSE here in case further simplification
4085 is possible (e.g., we can convert it to ABS). */
4086
4087 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
4088 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4089 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
4090 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4091 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4092 {
4093 rtx op0 = XEXP (x, 0);
4094 rtx op1 = const0_rtx;
4095 enum rtx_code comp_code
4096 = simplify_comparison (NE, &op0, &op1);
4097
4098 x = gen_rtx_combine (IF_THEN_ELSE, mode,
4099 gen_binary (comp_code, VOIDmode, op0, op1),
4100 XEXP (x, 1), const0_rtx);
4101 goto restart;
4102 }
4103
4104 /* In the following group of tests (and those in case IOR below),
4105 we start with some combination of logical operations and apply
4106 the distributive law followed by the inverse distributive law.
4107 Most of the time, this results in no change. However, if some of
4108 the operands are the same or inverses of each other, simplifications
4109 will result.
4110
4111 For example, (and (ior A B) (not B)) can occur as the result of
4112 expanding a bit field assignment. When we apply the distributive
4113 law to this, we get (ior (and A (not B)) (and B (not B))),
4114 which then simplifies to (and A (not B)). */
4115
4116 /* If we have (and (ior A B) C), apply the distributive law and then
4117 the inverse distributive law to see if things simplify. */
4118
4119 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4120 {
4121 x = apply_distributive_law
4122 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4123 gen_binary (AND, mode,
4124 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4125 gen_binary (AND, mode,
4126 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4127 if (GET_CODE (x) != AND)
4128 goto restart;
4129 }
4130
4131 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4132 {
4133 x = apply_distributive_law
4134 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4135 gen_binary (AND, mode,
4136 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4137 gen_binary (AND, mode,
4138 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4139 if (GET_CODE (x) != AND)
4140 goto restart;
4141 }
4142
4143 /* Similarly, taking advantage of the fact that
4144 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4145
4146 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4147 {
4148 x = apply_distributive_law
4149 (gen_binary (XOR, mode,
4150 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4151 XEXP (XEXP (x, 1), 0)),
4152 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4153 XEXP (XEXP (x, 1), 1))));
4154 if (GET_CODE (x) != AND)
4155 goto restart;
4156 }
4157
4158 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4159 {
4160 x = apply_distributive_law
4161 (gen_binary (XOR, mode,
4162 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4163 XEXP (XEXP (x, 0), 0)),
4164 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4165 XEXP (XEXP (x, 0), 1))));
4166 if (GET_CODE (x) != AND)
4167 goto restart;
4168 }
4169 break;
4170
4171 case IOR:
4172 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
4173 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4174 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4175 && (nonzero_bits (XEXP (x, 0), mode) & ~ INTVAL (XEXP (x, 1))) == 0)
4176 return XEXP (x, 1);
4177
4178 /* Convert (A & B) | A to A. */
4179 if (GET_CODE (XEXP (x, 0)) == AND
4180 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4181 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4182 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4183 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4184 return XEXP (x, 1);
4185
4186 /* If we have (ior (and A B) C), apply the distributive law and then
4187 the inverse distributive law to see if things simplify. */
4188
4189 if (GET_CODE (XEXP (x, 0)) == AND)
4190 {
4191 x = apply_distributive_law
4192 (gen_binary (AND, mode,
4193 gen_binary (IOR, mode,
4194 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4195 gen_binary (IOR, mode,
4196 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4197
4198 if (GET_CODE (x) != IOR)
4199 goto restart;
4200 }
4201
4202 if (GET_CODE (XEXP (x, 1)) == AND)
4203 {
4204 x = apply_distributive_law
4205 (gen_binary (AND, mode,
4206 gen_binary (IOR, mode,
4207 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4208 gen_binary (IOR, mode,
4209 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4210
4211 if (GET_CODE (x) != IOR)
4212 goto restart;
4213 }
4214
4215 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4216 mode size to (rotate A CX). */
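/* For example, in a 32-bit mode,
(ior (ashift A (const_int 8)) (lshiftrt A (const_int 24))) becomes
(rotate A (const_int 8)). */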
4217
4218 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4219 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4220 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4221 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4222 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4223 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4224 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4225 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4226 == GET_MODE_BITSIZE (mode)))
4227 {
4228 rtx shift_count;
4229
4230 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4231 shift_count = XEXP (XEXP (x, 0), 1);
4232 else
4233 shift_count = XEXP (XEXP (x, 1), 1);
4234 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4235 goto restart;
4236 }
4237 break;
4238
4239 case XOR:
4240 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4241 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4242 (NOT y). */
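/* For example, (xor (not A) (not B)) is (xor A B), since the two
complements cancel; (xor (not A) B) is (not (xor A B)). */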
4243 {
4244 int num_negated = 0;
4245 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4246
4247 if (GET_CODE (in1) == NOT)
4248 num_negated++, in1 = XEXP (in1, 0);
4249 if (GET_CODE (in2) == NOT)
4250 num_negated++, in2 = XEXP (in2, 0);
4251
4252 if (num_negated == 2)
4253 {
4254 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4255 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4256 }
4257 else if (num_negated == 1)
4258 {
4259 x = gen_unary (NOT, mode,
4260 gen_binary (XOR, mode, in1, in2));
4261 goto restart;
4262 }
4263 }
4264
4265 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4266 correspond to a machine insn or result in further simplifications
4267 if B is a constant. */
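/* Checking bit by bit: where B is 0 both sides are 0; where B is 1,
(xor (and A B) B) gives the complement of A, just as
(and (not A) B) does. */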
4268
4269 if (GET_CODE (XEXP (x, 0)) == AND
4270 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4271 && ! side_effects_p (XEXP (x, 1)))
4272 {
4273 x = gen_binary (AND, mode,
4274 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4275 XEXP (x, 1));
4276 goto restart;
4277 }
4278 else if (GET_CODE (XEXP (x, 0)) == AND
4279 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4280 && ! side_effects_p (XEXP (x, 1)))
4281 {
4282 x = gen_binary (AND, mode,
4283 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4284 XEXP (x, 1));
4285 goto restart;
4286 }
4287
4288
4289 #if STORE_FLAG_VALUE == 1
4290 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4291 comparison. */
4292 if (XEXP (x, 1) == const1_rtx
4293 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4294 && reversible_comparison_p (XEXP (x, 0)))
4295 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4296 mode, XEXP (XEXP (x, 0), 0),
4297 XEXP (XEXP (x, 0), 1));
4298
4299 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4300 is (lt foo (const_int 0)), so we can perform the above
4301 simplification. */
4302
4303 if (XEXP (x, 1) == const1_rtx
4304 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4305 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4306 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
4307 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
4308 #endif
4309
4310 /* (xor (comparison foo bar) (const_int sign-bit))
4311 when STORE_FLAG_VALUE is the sign bit. */
4312 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4313 && (STORE_FLAG_VALUE
4314 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4315 && XEXP (x, 1) == const_true_rtx
4316 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4317 && reversible_comparison_p (XEXP (x, 0)))
4318 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4319 mode, XEXP (XEXP (x, 0), 0),
4320 XEXP (XEXP (x, 0), 1));
4321 break;
4322
4323 case ABS:
4324 /* (abs (neg <foo>)) -> (abs <foo>) */
4325 if (GET_CODE (XEXP (x, 0)) == NEG)
4326 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4327
4328 /* If operand is something known to be positive, ignore the ABS. */
4329 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4330 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4331 <= HOST_BITS_PER_WIDE_INT)
4332 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4333 & ((HOST_WIDE_INT) 1
4334 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4335 == 0)))
4336 return XEXP (x, 0);
4337
4338
4339 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4340 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4341 {
4342 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4343 goto restart;
4344 }
4345 break;
4346
4347 case FFS:
4348 /* (ffs (*_extend <X>)) = (ffs <X>) */
4349 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4350 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4351 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4352 break;
4353
4354 case FLOAT:
4355 /* (float (sign_extend <X>)) = (float <X>). */
4356 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4357 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4358 break;
4359
4360 case LSHIFT:
4361 case ASHIFT:
4362 case LSHIFTRT:
4363 case ASHIFTRT:
4364 case ROTATE:
4365 case ROTATERT:
4366 /* If this is a shift by a constant amount, simplify it. */
4367 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4368 {
4369 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4370 INTVAL (XEXP (x, 1)));
4371 if (GET_CODE (x) != code)
4372 goto restart;
4373 }
4374
4375 #ifdef SHIFT_COUNT_TRUNCATED
4376 else if (GET_CODE (XEXP (x, 1)) != REG)
4377 SUBST (XEXP (x, 1),
4378 force_to_mode (XEXP (x, 1), GET_MODE (x),
4379 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
4380 NULL_RTX));
4381 #endif
4382
4383 break;
4384 }
4385
4386 return x;
4387 }
4388 \f
4389 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4390 operations" because they can be replaced with two more basic operations.
4391 ZERO_EXTEND is also considered "compound" because it can be replaced with
4392 an AND operation, which is simpler, though only one operation.
4393
4394 The function expand_compound_operation is called with an rtx expression
4395 and will convert it to the appropriate shifts and AND operations,
4396 simplifying at each stage.
4397
4398 The function make_compound_operation is called to convert an expression
4399 consisting of shifts and ANDs into the equivalent compound expression.
4400 It is the inverse of this function, loosely speaking. */
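/* For example, on a 32-bit target, (sign_extend:SI (reg:QI R)) expands,
in effect, to a left shift by 24 followed by an arithmetic right shift
by 24, while (zero_extend:SI (reg:QI R)) becomes an AND with 255. */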
4401
4402 static rtx
4403 expand_compound_operation (x)
4404 rtx x;
4405 {
4406 int pos = 0, len;
4407 int unsignedp = 0;
4408 int modewidth;
4409 rtx tem;
4410
4411 switch (GET_CODE (x))
4412 {
4413 case ZERO_EXTEND:
4414 unsignedp = 1;
4415 case SIGN_EXTEND:
4416 /* We can't necessarily use a const_int for a multiword mode;
4417 it depends on implicitly extending the value.
4418 Since we don't know the right way to extend it,
4419 we can't tell whether the implicit way is right.
4420
4421 Even for a mode that is no wider than a const_int,
4422 we can't win, because we need to sign extend one of its bits through
4423 the rest of it, and we don't know which bit. */
4424 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4425 return x;
4426
4427 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4428 return x;
4429
4430 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4431 /* If the inner object has VOIDmode (the only way this can happen
4432 is if it is an ASM_OPERANDS), we can't do anything since we don't
4433 know how much masking to do. */
4434 if (len == 0)
4435 return x;
4436
4437 break;
4438
4439 case ZERO_EXTRACT:
4440 unsignedp = 1;
4441 case SIGN_EXTRACT:
4442 /* If the operand is a CLOBBER, just return it. */
4443 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4444 return XEXP (x, 0);
4445
4446 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4447 || GET_CODE (XEXP (x, 2)) != CONST_INT
4448 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4449 return x;
4450
4451 len = INTVAL (XEXP (x, 1));
4452 pos = INTVAL (XEXP (x, 2));
4453
4454 /* If this goes outside the object being extracted, replace the object
4455 with a (use (mem ...)) construct that only combine understands
4456 and is used only for this purpose. */
4457 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4458 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4459
4460 #if BITS_BIG_ENDIAN
4461 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4462 #endif
4463 break;
4464
4465 default:
4466 return x;
4467 }
4468
4469 /* If we reach here, we want to return a pair of shifts. The inner
4470 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4471 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4472 logical depending on the value of UNSIGNEDP.
4473
4474 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4475 converted into an AND of a shift.
4476
4477 We must check for the case where the left shift would have a negative
4478 count. This can happen in a case like (x >> 31) & 255 on machines
4479 that can't shift by a constant. On those machines, we would first
4480 combine the shift with the AND to produce a variable-position
4481 extraction. Then the constant of 31 would be substituted in to produce
4482 such a position. */
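/* For example, (sign_extract:SI X 8 4), with LEN == 8 and POS == 4 in a
32-bit mode, becomes an inner (ashift X 20) followed by an outer
(ashiftrt ... 24), since 20 == 32 - 4 - 8 and 24 == 32 - 8. */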
4483
4484 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4485 if (modewidth >= pos + len)
4486 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4487 GET_MODE (x),
4488 simplify_shift_const (NULL_RTX, ASHIFT,
4489 GET_MODE (x),
4490 XEXP (x, 0),
4491 modewidth - pos - len),
4492 modewidth - len);
4493
4494 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4495 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4496 simplify_shift_const (NULL_RTX, LSHIFTRT,
4497 GET_MODE (x),
4498 XEXP (x, 0), pos),
4499 ((HOST_WIDE_INT) 1 << len) - 1);
4500 else
4501 /* Any other cases we can't handle. */
4502 return x;
4503
4504
4505 /* If we couldn't do this for some reason, return the original
4506 expression. */
4507 if (GET_CODE (tem) == CLOBBER)
4508 return x;
4509
4510 return tem;
4511 }
4512 \f
4513 /* X is a SET which contains an assignment of one object into
4514 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4515 or certain SUBREGS). If possible, convert it into a series of
4516 logical operations.
4517
4518 We half-heartedly support variable positions, but do not at all
4519 support variable lengths. */
4520
4521 static rtx
4522 expand_field_assignment (x)
4523 rtx x;
4524 {
4525 rtx inner;
4526 rtx pos; /* Always counts from low bit. */
4527 int len;
4528 rtx mask;
4529 enum machine_mode compute_mode;
4530
4531 /* Loop until we find something we can't simplify. */
4532 while (1)
4533 {
4534 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4535 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4536 {
4537 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4538 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4539 pos = const0_rtx;
4540 }
4541 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4542 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4543 {
4544 inner = XEXP (SET_DEST (x), 0);
4545 len = INTVAL (XEXP (SET_DEST (x), 1));
4546 pos = XEXP (SET_DEST (x), 2);
4547
4548 /* If the position is constant and the field extends beyond the width
4549 of INNER, surround INNER with a USE to indicate this. */
4550 if (GET_CODE (pos) == CONST_INT
4551 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4552 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4553
4554 #if BITS_BIG_ENDIAN
4555 if (GET_CODE (pos) == CONST_INT)
4556 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4557 - INTVAL (pos));
4558 else if (GET_CODE (pos) == MINUS
4559 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4560 && (INTVAL (XEXP (pos, 1))
4561 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4562 /* If position is ADJUST - X, new position is X. */
4563 pos = XEXP (pos, 0);
4564 else
4565 pos = gen_binary (MINUS, GET_MODE (pos),
4566 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4567 - len),
4568 pos);
4569 #endif
4570 }
4571
4572 /* A SUBREG between two modes that occupy the same numbers of words
4573 can be done by moving the SUBREG to the source. */
4574 else if (GET_CODE (SET_DEST (x)) == SUBREG
4575 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4576 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4577 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4578 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4579 {
4580 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4581 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4582 SET_SRC (x)));
4583 continue;
4584 }
4585 else
4586 break;
4587
4588 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4589 inner = SUBREG_REG (inner);
4590
4591 compute_mode = GET_MODE (inner);
4592
4593 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4594 if (len < HOST_BITS_PER_WIDE_INT)
4595 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4596 else
4597 break;
4598
4599 /* Now compute the equivalent expression. Make a copy of INNER
4600 for the SET_DEST in case it is a MEM into which we will substitute;
4601 we don't want shared RTL in that case. */
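/* In C terms, the value stored is
(inner & ~(mask << pos)) | ((src & mask) << pos):
the first term clears the field and the second inserts SRC. */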
4602 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4603 gen_binary (IOR, compute_mode,
4604 gen_binary (AND, compute_mode,
4605 gen_unary (NOT, compute_mode,
4606 gen_binary (ASHIFT,
4607 compute_mode,
4608 mask, pos)),
4609 inner),
4610 gen_binary (ASHIFT, compute_mode,
4611 gen_binary (AND, compute_mode,
4612 gen_lowpart_for_combine
4613 (compute_mode,
4614 SET_SRC (x)),
4615 mask),
4616 pos)));
4617 }
4618
4619 return x;
4620 }
4621 \f
4622 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
4623 it is an RTX that represents a variable starting position; otherwise,
4624 POS is the (constant) starting bit position (counted from the LSB).
4625
4626 INNER may be a USE. This will occur when we started with a bitfield
4627 that went outside the boundary of the object in memory, which is
4628 allowed on most machines. To isolate this case, we produce a USE
4629 whose mode is wide enough and surround the MEM with it. The only
4630 code that understands the USE is this routine. If it is not removed,
4631 it will cause the resulting insn not to match.
4632
4633 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4634 signed reference.
4635
4636 IN_DEST is non-zero if this is a reference in the destination of a
4637 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4638 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4639 be used.
4640
4641 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4642 ZERO_EXTRACT should be built even for bits starting at bit 0.
4643
4644 MODE is the desired mode of the result (if IN_DEST == 0). */
4645
4646 static rtx
4647 make_extraction (mode, inner, pos, pos_rtx, len,
4648 unsignedp, in_dest, in_compare)
4649 enum machine_mode mode;
4650 rtx inner;
4651 int pos;
4652 rtx pos_rtx;
4653 int len;
4654 int unsignedp;
4655 int in_dest, in_compare;
4656 {
4657 /* This mode describes the size of the storage area
4658 to fetch the overall value from. Within that, we
4659 ignore the POS lowest bits, etc. */
4660 enum machine_mode is_mode = GET_MODE (inner);
4661 enum machine_mode inner_mode;
4662 enum machine_mode wanted_mem_mode = byte_mode;
4663 enum machine_mode pos_mode = word_mode;
4664 enum machine_mode extraction_mode = word_mode;
4665 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4666 int spans_byte = 0;
4667 rtx new = 0;
4668 rtx orig_pos_rtx = pos_rtx;
4669
4670 /* Get some information about INNER and get the innermost object. */
4671 if (GET_CODE (inner) == USE)
4672 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
4673 /* We don't need to adjust the position because we set up the USE
4674 to pretend that it was a full-word object. */
4675 spans_byte = 1, inner = XEXP (inner, 0);
4676 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4677 {
4678 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
4679 consider just the QI as the memory to extract from.
4680 The subreg adds or removes high bits; its mode is
4681 irrelevant to the meaning of this extraction,
4682 since POS and LEN count from the lsb. */
4683 if (GET_CODE (SUBREG_REG (inner)) == MEM)
4684 is_mode = GET_MODE (SUBREG_REG (inner));
4685 inner = SUBREG_REG (inner);
4686 }
4687
4688 inner_mode = GET_MODE (inner);
4689
4690 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4691 pos = INTVAL (pos_rtx), pos_rtx = 0;
4692
4693 /* See if this can be done without an extraction. We never can if the
4694 width of the field is not the same as that of some integer mode. For
4695 registers, we can only avoid the extraction if the position is at the
4696 low-order bit and this is either not in the destination or we have the
4697 appropriate STRICT_LOW_PART operation available.
4698
4699 For MEM, we can avoid an extract if the field starts on an appropriate
4700 boundary and we can change the mode of the memory reference. However,
4701 we cannot directly access the MEM if we have a USE and the underlying
4702 MEM is not TMODE. This combination means that MEM was being used in a
4703 context where bits outside its mode were being referenced; that is only
4704 valid in bit-field insns. */
4705
4706 if (tmode != BLKmode
4707 && ! (spans_byte && inner_mode != tmode)
4708 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
4709 && (! in_dest
4710 || (GET_CODE (inner) == REG
4711 && (movstrict_optab->handlers[(int) tmode].insn_code
4712 != CODE_FOR_nothing))))
4713 || (GET_CODE (inner) == MEM && pos_rtx == 0
4714 && (pos
4715 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4716 : BITS_PER_UNIT)) == 0
4717 /* We can't do this if we are widening INNER_MODE (it
4718 may not be aligned, for one thing). */
4719 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4720 && (inner_mode == tmode
4721 || (! mode_dependent_address_p (XEXP (inner, 0))
4722 && ! MEM_VOLATILE_P (inner))))))
4723 {
4724 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4725 field. If the original and current modes are the same, we need not
4726 adjust the offset. Otherwise, we do so if bytes are big-endian.
4727
4728 If INNER is not a MEM, get a piece consisting of just the field
4729 of interest (in this case POS must be 0). */
4730
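/* Worked example (illustrative): with IS_MODE == SImode (32 bits),
LEN == 8 and POS == 8, the field's byte sits at offset
(32 - 8 - 8) / 8 == 2 from the start of the object when bytes are
big-endian, and at offset 8 / 8 == 1 when they are little-endian. */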
4731 if (GET_CODE (inner) == MEM)
4732 {
4733 int offset;
4734 /* POS counts from lsb, but make OFFSET count in memory order. */
4735 if (BYTES_BIG_ENDIAN)
4736 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
4737 else
4738 offset = pos / BITS_PER_UNIT;
4739
4740 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4741 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4742 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4743 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4744 }
4745 else if (GET_CODE (inner) == REG)
4746 /* We can't call gen_lowpart_for_combine here since we always want
4747 a SUBREG and it would sometimes return a new hard register. */
4748 new = gen_rtx (SUBREG, tmode, inner,
4749 (WORDS_BIG_ENDIAN
4750 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
4751 ? ((GET_MODE_SIZE (inner_mode) - GET_MODE_SIZE (tmode))
4752 / UNITS_PER_WORD)
4753 : 0));
4754 else
4755 new = force_to_mode (inner, tmode, len, NULL_RTX);
4756
4757 /* If this extraction is going into the destination of a SET,
4758 make a STRICT_LOW_PART unless we made a MEM. */
4759
4760 if (in_dest)
4761 return (GET_CODE (new) == MEM ? new
4762 : (GET_CODE (new) != SUBREG
4763 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4764 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
4765
4766 /* Otherwise, sign- or zero-extend unless we already are in the
4767 proper mode. */
4768
4769 return (mode == tmode ? new
4770 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4771 mode, new));
4772 }
4773
4774 /* Unless this is a COMPARE or we have a funny memory reference,
4775 don't do anything with zero-extending field extracts starting at
4776 the low-order bit since they are simple AND operations. */
4777 if (pos_rtx == 0 && pos == 0 && ! in_dest
4778 && ! in_compare && ! spans_byte && unsignedp)
4779 return 0;
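/* For example, an unsigned LEN-bit field at bit 0 is just
(and X (const_int 2**LEN - 1)); returning 0 here tells the caller
to leave such an expression in its AND form. */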
4780
4781 /* Get the mode to use should INNER be a MEM, the mode for the position,
4782 and the mode for the result. */
4783 #ifdef HAVE_insv
4784 if (in_dest)
4785 {
4786 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4787 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4788 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4789 }
4790 #endif
4791
4792 #ifdef HAVE_extzv
4793 if (! in_dest && unsignedp)
4794 {
4795 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4796 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4797 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4798 }
4799 #endif
4800
4801 #ifdef HAVE_extv
4802 if (! in_dest && ! unsignedp)
4803 {
4804 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4805 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4806 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4807 }
4808 #endif
4809
4810 /* Never narrow an object, since that might not be safe. */
4811
4812 if (mode != VOIDmode
4813 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4814 extraction_mode = mode;
4815
4816 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4817 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4818 pos_mode = GET_MODE (pos_rtx);
4819
4820 /* If this is not from memory or we have to change the mode of memory and
4821 cannot, the desired mode is EXTRACTION_MODE. */
4822 if (GET_CODE (inner) != MEM
4823 || (inner_mode != wanted_mem_mode
4824 && (mode_dependent_address_p (XEXP (inner, 0))
4825 || MEM_VOLATILE_P (inner))))
4826 wanted_mem_mode = extraction_mode;
4827
4828 #if BITS_BIG_ENDIAN
4829 /* If position is constant, compute new position. Otherwise, build
4830 subtraction. */
4831 if (pos_rtx == 0)
4832 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4833 - len - pos);
4834 else
4835 pos_rtx
4836 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
4837 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4838 GET_MODE_BITSIZE (wanted_mem_mode))
4839 - len),
4840 pos_rtx);
4841 #endif
4842
4843 /* If INNER has a wider mode, make it smaller. If this is a constant
4844 extract, try to adjust the address to point to the byte containing
4845 the value. */
4846 if (wanted_mem_mode != VOIDmode
4847 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4848 && ((GET_CODE (inner) == MEM
4849 && (inner_mode == wanted_mem_mode
4850 || (! mode_dependent_address_p (XEXP (inner, 0))
4851 && ! MEM_VOLATILE_P (inner))))))
4852 {
4853 int offset = 0;
4854
4855 /* The computations below will be correct if the machine is big
4856 endian in both bits and bytes or little endian in bits and bytes.
4857 If it is mixed, we must adjust. */
4858
4859 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4860 if (! spans_byte && is_mode != wanted_mem_mode)
4861 offset = (GET_MODE_SIZE (is_mode)
4862 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4863 #endif
4864
4865 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4866 adjust OFFSET to compensate. */
4867 #if BYTES_BIG_ENDIAN
4868 if (! spans_byte
4869 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4870 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4871 #endif
4872
4873 /* If this is a constant position, we can move to the desired byte. */
4874 if (pos_rtx == 0)
4875 {
4876 offset += pos / BITS_PER_UNIT;
4877 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4878 }
4879
4880 if (offset != 0 || inner_mode != wanted_mem_mode)
4881 {
4882 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4883 plus_constant (XEXP (inner, 0), offset));
4884 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4885 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4886 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4887 inner = newmem;
4888 }
4889 }
4890
4891 /* If INNER is not memory, we can always get it into the proper mode. */
4892 else if (GET_CODE (inner) != MEM)
4893 inner = force_to_mode (inner, extraction_mode,
4894 (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
4895 : len + pos),
4896 NULL_RTX);
4897
4898 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4899 have to zero extend. Otherwise, we can just use a SUBREG. */
4900 if (pos_rtx != 0
4901 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4902 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
4903 else if (pos_rtx != 0
4904 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4905 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4906
4907 /* Make POS_RTX unless we already have it and it is correct. If we don't
4908 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
4909 be a CONST_INT. */
4910 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
4911 pos_rtx = orig_pos_rtx;
4912
4913 else if (pos_rtx == 0)
4914 pos_rtx = GEN_INT (pos);
4915
4916 /* Make the required operation. See if we can use existing rtx. */
4917 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
4918 extraction_mode, inner, GEN_INT (len), pos_rtx);
4919 if (! in_dest)
4920 new = gen_lowpart_for_combine (mode, new);
4921
4922 return new;
4923 }
4924 \f
4925 /* Look at the expression rooted at X. Look for expressions
4926 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4927 Form these expressions.
4928
4929 Return the new rtx, usually just X.
4930
4931 Also, for machines like the Vax that don't have logical shift insns,
4932 try to convert logical to arithmetic shift operations in cases where
4933 they are equivalent. This undoes the canonicalizations to logical
4934 shifts done elsewhere.
4935
4936 We try, as much as possible, to re-use rtl expressions to save memory.
4937
4938 IN_CODE says what kind of expression we are processing. Normally, it is
4939 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
4940 being kludges), it is MEM. When processing the arguments of a comparison
4941 or a COMPARE against zero, it is COMPARE. */
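/* For example (illustrative): inside an address,
(plus (ashift R (const_int 2)) B) is rewritten below as
(plus (mult R (const_int 4)) B), the canonical form for address
arithmetic, while outside an address
(and (lshiftrt R (const_int 3)) (const_int 15)) becomes the
compound (zero_extract R (const_int 4) (const_int 3)). */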
4942
4943 static rtx
4944 make_compound_operation (x, in_code)
4945 rtx x;
4946 enum rtx_code in_code;
4947 {
4948 enum rtx_code code = GET_CODE (x);
4949 enum machine_mode mode = GET_MODE (x);
4950 int mode_width = GET_MODE_BITSIZE (mode);
4951 enum rtx_code next_code;
4952 int i, count;
4953 rtx new = 0;
4954 char *fmt;
4955
4956 /* Select the code to be used in recursive calls. Once we are inside an
4957 address, we stay there. If we have a comparison, set to COMPARE,
4958 but once inside, go back to our default of SET. */
4959
4960 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
4961 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
4962 && XEXP (x, 1) == const0_rtx) ? COMPARE
4963 : in_code == COMPARE ? SET : in_code);
4964
4965 /* Process depending on the code of this operation. If NEW is set
4966 non-zero, it will be returned. */
4967
4968 switch (code)
4969 {
4970 case ASHIFT:
4971 case LSHIFT:
4972 /* Convert shifts by constants into multiplications if inside
4973 an address. */
4974 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
4975 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4976 && INTVAL (XEXP (x, 1)) >= 0)
4977 new = gen_rtx_combine (MULT, mode, XEXP (x, 0),
4978 GEN_INT ((HOST_WIDE_INT) 1
4979 << INTVAL (XEXP (x, 1))));
4980 break;
4981
4982 case AND:
4983 /* If the second operand is not a constant, we can't do anything
4984 with it. */
4985 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4986 break;
4987
4988 /* If the constant is a power of two minus one and the first operand
4989 is a logical right shift, make an extraction. */
4990 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4991 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4992 new = make_extraction (mode, XEXP (XEXP (x, 0), 0), 0,
4993 XEXP (XEXP (x, 0), 1), i, 1,
4994 0, in_code == COMPARE);
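/* E.g. (illustrative): for (and (lshiftrt R (const_int 3))
(const_int 15)), 15 is 2**4 - 1, so I == 4 and the call above asks
for the unsigned 4-bit field of R starting at bit 3. */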
4995
4996 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4997 else if (GET_CODE (XEXP (x, 0)) == SUBREG
4998 && subreg_lowpart_p (XEXP (x, 0))
4999 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5000 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5001 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))),
5002 XEXP (SUBREG_REG (XEXP (x, 0)), 0), 0,
5003 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5004 0, in_code == COMPARE);
5005
5006
5007 /* If we have (and (rotate X C) M) and C is no smaller than the number
5008 of bits in M, this is an extraction. */
5009
5010 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5011 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5012 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5013 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5014 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
5015 (GET_MODE_BITSIZE (mode)
5016 - INTVAL (XEXP (XEXP (x, 0), 1))),
5017 NULL_RTX, i, 1, 0, in_code == COMPARE);
5018
5019 /* On machines without logical shifts, if the operand of the AND is
5020 a logical shift and our mask turns off all the propagated sign
5021 bits, we can replace the logical shift with an arithmetic shift. */
5022 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5023 && (lshr_optab->handlers[(int) mode].insn_code
5024 == CODE_FOR_nothing)
5025 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5026 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5027 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5028 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5029 && mode_width <= HOST_BITS_PER_WIDE_INT)
5030 {
5031 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5032
5033 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5034 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5035 SUBST (XEXP (x, 0),
5036 gen_rtx_combine (ASHIFTRT, mode, XEXP (XEXP (x, 0), 0),
5037 XEXP (XEXP (x, 0), 1)));
5038 }
5039
5040 /* If the constant is one less than a power of two, this might be
5041 representable by an extraction even if no shift is present.
5042 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5043 we are in a COMPARE. */
5044 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5045 new = make_extraction (mode, XEXP (x, 0), 0, NULL_RTX, i, 1,
5046 0, in_code == COMPARE);
5047
5048 /* If we are in a comparison and this is an AND with a power of two,
5049 convert this into the appropriate bit extract. */
5050 else if (in_code == COMPARE
5051 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5052 new = make_extraction (mode, XEXP (x, 0), i, NULL_RTX, 1, 1, 0, 1);
5053
5054 break;
5055
5056 case LSHIFTRT:
5057 /* If the sign bit is known to be zero, replace this with an
5058 arithmetic shift. */
5059 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5060 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5061 && mode_width <= HOST_BITS_PER_WIDE_INT
5062 && (nonzero_bits (XEXP (x, 0), mode) & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
5063 {
5064 new = gen_rtx_combine (ASHIFTRT, mode, XEXP (x, 0), XEXP (x, 1));
5065 break;
5066 }
5067
5068 /* ... fall through ... */
5069
5070 case ASHIFTRT:
5071 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5072 this is a SIGN_EXTRACT. */
5073 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5074 && GET_CODE (XEXP (x, 0)) == ASHIFT
5075 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5076 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
5077 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
5078 (INTVAL (XEXP (x, 1))
5079 - INTVAL (XEXP (XEXP (x, 0), 1))),
5080 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5081 code == LSHIFTRT, 0, in_code == COMPARE);
5082
5083 /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2). In these
5084 cases, we are better off returning a SIGN_EXTEND of the operation. */
5085
5086 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5087 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
5088 || GET_CODE (XEXP (x, 0)) == XOR
5089 || GET_CODE (XEXP (x, 0)) == PLUS)
5090 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5091 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5092 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5093 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
5094 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5095 && (INTVAL (XEXP (XEXP (x, 0), 1))
5096 & (((HOST_WIDE_INT) 1
5097 << INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))) - 1)) == 0)
5098 {
5099 HOST_WIDE_INT newop1
5100 = (INTVAL (XEXP (XEXP (x, 0), 1))
5101 >> INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)));
5102
5103 new = make_extraction (mode,
5104 gen_binary (GET_CODE (XEXP (x, 0)), mode,
5105 XEXP (XEXP (XEXP (x, 0), 0), 0),
5106 GEN_INT (newop1)),
5107 (INTVAL (XEXP (x, 1))
5108 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5109 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5110 code == LSHIFTRT, 0, in_code == COMPARE);
5111 }
5112
5113 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
5114 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5115 && GET_CODE (XEXP (x, 0)) == NEG
5116 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5117 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5118 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
5119 new = make_extraction (mode,
5120 gen_unary (GET_CODE (XEXP (x, 0)), mode,
5121 XEXP (XEXP (XEXP (x, 0), 0), 0)),
5122 (INTVAL (XEXP (x, 1))
5123 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5124 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5125 code == LSHIFTRT, 0, in_code == COMPARE);
5126 break;
5127 }
5128
5129 if (new)
5130 {
5131 x = gen_lowpart_for_combine (mode, new);
5132 code = GET_CODE (x);
5133 }
5134
5135 /* Now recursively process each operand of this operation. */
5136 fmt = GET_RTX_FORMAT (code);
5137 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5138 if (fmt[i] == 'e')
5139 {
5140 new = make_compound_operation (XEXP (x, i), next_code);
5141 SUBST (XEXP (x, i), new);
5142 }
5143
5144 return x;
5145 }
5146 \f
5147 /* Given M, see if it is a value that would select a field of bits
5148 within an item, but not the entire word. Return -1 if not.
5149 Otherwise, return the starting position of the field, where 0 is the
5150 low-order bit.
5151
5152 *PLEN is set to the length of the field. */
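/* Worked example (illustrative): for M == 0x78 (binary 0111 1000),
M & -M == 0x08 gives POS == 3, and (M >> 3) + 1 == 0x10 is a power
of two, so *PLEN == 4: M selects a 4-bit field at bit 3. For
M == 5 (binary 101), (M >> 0) + 1 == 6 is not a power of two, so
the routine returns -1. */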
5153
5154 static int
5155 get_pos_from_mask (m, plen)
5156 unsigned HOST_WIDE_INT m;
5157 int *plen;
5158 {
5159 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5160 int pos = exact_log2 (m & - m);
5161
5162 if (pos < 0)
5163 return -1;
5164
5165 /* Now shift off the low-order zero bits and see if we have a power of
5166 two minus 1. */
5167 *plen = exact_log2 ((m >> pos) + 1);
5168
5169 if (*plen <= 0)
5170 return -1;
5171
5172 return pos;
5173 }
5174 \f
5175 /* Rewrite X so that it is an expression in MODE. We only care about the
5176 low-order BITS bits so we can ignore AND operations that just clear
5177 higher-order bits.
5178
5179 Also, if REG is non-zero and X is a register equal in value to REG,
5180 replace X with REG. */
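/* For example (illustrative): forcing (and:SI R (const_int 255))
into QImode with BITS == 8 first narrows R, then notices that the
remaining mask is exactly the low eight one-bits and drops the AND,
returning just the low part of R as a QImode value. */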
5181
5182 static rtx
5183 force_to_mode (x, mode, bits, reg)
5184 rtx x;
5185 enum machine_mode mode;
5186 int bits;
5187 rtx reg;
5188 {
5189 enum rtx_code code = GET_CODE (x);
5190 enum machine_mode op_mode = mode;
5191
5192 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
5193 just get X in the proper mode. */
5194
5195 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5196 || bits > GET_MODE_BITSIZE (mode))
5197 return gen_lowpart_for_combine (mode, x);
5198
5199 switch (code)
5200 {
5201 case SIGN_EXTEND:
5202 case ZERO_EXTEND:
5203 case ZERO_EXTRACT:
5204 case SIGN_EXTRACT:
5205 x = expand_compound_operation (x);
5206 if (GET_CODE (x) != code)
5207 return force_to_mode (x, mode, bits, reg);
5208 break;
5209
5210 case REG:
5211 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5212 || rtx_equal_p (reg, get_last_value (x))))
5213 x = reg;
5214 break;
5215
5216 case CONST_INT:
5217 if (bits < HOST_BITS_PER_WIDE_INT)
5218 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
5219 return x;
5220
5221 case SUBREG:
5222 /* Ignore low-order SUBREGs. */
5223 if (subreg_lowpart_p (x))
5224 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
5225 break;
5226
5227 case AND:
5228 /* If this is an AND with a constant, handle it specially here.
5229 Otherwise, we fall through to do the general binary case. */
5230
5231 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5232 {
5233 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
5234 int len = exact_log2 (mask + 1);
5235 rtx op = XEXP (x, 0);
5236
5237 /* If this is masking some low-order bits, we may be able to
5238 impose a stricter constraint on what bits of the operand are
5239 required. */
5240
5241 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
5242 reg);
5243
5244 if (bits < HOST_BITS_PER_WIDE_INT)
5245 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
5246
5247 /* If we have no AND in MODE, use the original mode for the
5248 operation. */
5249
5250 if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5251 op_mode = GET_MODE (x);
5252
5253 x = simplify_and_const_int (x, op_mode, op, mask);
5254
5255 /* If X is still an AND, see if it is an AND with a mask that
5256 is just some low-order bits. If so, and it is BITS wide (it
5257 can't be wider), we don't need it. */
5258
5259 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5260 && bits < HOST_BITS_PER_WIDE_INT
5261 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
5262 x = XEXP (x, 0);
5263
5264 break;
5265 }
5266
5267 /* ... fall through ... */
5268
5269 case PLUS:
5270 case MINUS:
5271 case MULT:
5272 case IOR:
5273 case XOR:
5274 /* For most binary operations, just propagate into the operation and
5275 change the mode if we have an operation of that mode. */
5276
5277 if ((code == PLUS
5278 && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5279 || (code == MINUS
5280 && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5281 || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
5282 == CODE_FOR_nothing))
5283 || (code == AND
5284 && and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5285 || (code == IOR
5286 && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5287 || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
5288 == CODE_FOR_nothing)))
5289 op_mode = GET_MODE (x);
5290
5291 x = gen_binary (code, op_mode,
5292 gen_lowpart_for_combine (op_mode,
5293 force_to_mode (XEXP (x, 0),
5294 mode, bits,
5295 reg)),
5296 gen_lowpart_for_combine (op_mode,
5297 force_to_mode (XEXP (x, 1),
5298 mode, bits,
5299 reg)));
5300 break;
5301
5302 case ASHIFT:
5303 case LSHIFT:
5304 /* For left shifts, do the same, but just for the first operand.
5305 If the shift count is a constant, we need even fewer bits of the
5306 first operand. */
5307
5308 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
5309 bits -= INTVAL (XEXP (x, 1));
5310
5311 if ((code == ASHIFT
5312 && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5313 || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
5314 == CODE_FOR_nothing)))
5315 op_mode = GET_MODE (x);
5316
5317 x = gen_binary (code, op_mode,
5318 gen_lowpart_for_combine (op_mode,
5319 force_to_mode (XEXP (x, 0),
5320 mode, bits,
5321 reg)),
5322 XEXP (x, 1));
5323 break;
5324
5325 case LSHIFTRT:
5326 /* Here we can only do something if the shift count is a constant and
5327 the count plus BITS is no larger than the width of MODE; in that
5328 case, we can do the shift in MODE. */
5329
5330 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5331 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
5332 {
5333 rtx inner = force_to_mode (XEXP (x, 0), mode,
5334 bits + INTVAL (XEXP (x, 1)), reg);
5335
5336 if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5337 op_mode = GET_MODE (x);
5338
5339 x = gen_binary (LSHIFTRT, op_mode,
5340 gen_lowpart_for_combine (op_mode, inner),
5341 XEXP (x, 1));
5342 }
5343 break;
5344
5345 case ASHIFTRT:
5346 /* If this is a sign-extension operation that just affects bits
5347 we don't care about, remove it. */
5348
5349 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5350 && INTVAL (XEXP (x, 1)) >= 0
5351 && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
5352 && GET_CODE (XEXP (x, 0)) == ASHIFT
5353 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5354 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5355 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
5356 break;
5357
5358 case NEG:
5359 case NOT:
5360 if ((code == NEG
5361 && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5362 || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
5363 == CODE_FOR_nothing)))
5364 op_mode = GET_MODE (x);
5365
5366 /* Handle these similarly to the way we handle most binary operations. */
5367 x = gen_unary (code, op_mode,
5368 gen_lowpart_for_combine (op_mode,
5369 force_to_mode (XEXP (x, 0), mode,
5370 bits, reg)));
5371 break;
5372
5373 case IF_THEN_ELSE:
5374 /* We have no way of knowing if the IF_THEN_ELSE can itself be
5375 written in a narrower mode. We play it safe and do not do so. */
5376
5377 SUBST (XEXP (x, 1),
5378 gen_lowpart_for_combine (GET_MODE (x),
5379 force_to_mode (XEXP (x, 1), mode,
5380 bits, reg)));
5381 SUBST (XEXP (x, 2),
5382 gen_lowpart_for_combine (GET_MODE (x),
5383 force_to_mode (XEXP (x, 2), mode,
5384 bits, reg)));
5385 break;
5386 }
5387
5388 /* Ensure we return a value of the proper mode. */
5389 return gen_lowpart_for_combine (mode, x);
5390 }
5391 \f
5392 /* Return the value of expression X given the fact that condition COND
5393 is known to be true when applied to REG as its first operand and VAL
5394 as its second. X is known to not be shared and so can be modified in
5395 place.
5396
5397 We only handle the simplest cases, and specifically those cases that
5398 arise with IF_THEN_ELSE expressions. */
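/* For example (illustrative): if COND is GE with REG == R and
VAL == (const_int 0), i.e. R is known non-negative, then (abs R)
simplifies to R and (smax R (const_int 0)) simplifies to R as
well. */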
5399
5400 static rtx
5401 known_cond (x, cond, reg, val)
5402 rtx x;
5403 enum rtx_code cond;
5404 rtx reg, val;
5405 {
5406 enum rtx_code code = GET_CODE (x);
5407 rtx new, temp;
5408 char *fmt;
5409 int i, j;
5410
5411 if (side_effects_p (x))
5412 return x;
5413
5414 if (cond == EQ && rtx_equal_p (x, reg))
5415 return val;
5416
5417 /* If X is (abs REG) and we know something about REG's relationship
5418 with zero, we may be able to simplify this. */
5419
5420 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
5421 switch (cond)
5422 {
5423 case GE: case GT: case EQ:
5424 return XEXP (x, 0);
5425 case LT: case LE:
5426 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
5427 }
5428
5429 /* The only other cases we handle are MIN, MAX, and comparisons if the
5430 operands are the same as REG and VAL. */
5431
5432 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
5433 {
5434 if (rtx_equal_p (XEXP (x, 0), val))
5435 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
5436
5437 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
5438 {
5439 if (GET_RTX_CLASS (code) == '<')
5440 return (comparison_dominates_p (cond, code) ? const_true_rtx
5441 : (comparison_dominates_p (cond,
5442 reverse_condition (code))
5443 ? const0_rtx : x));
5444
5445 else if (code == SMAX || code == SMIN
5446 || code == UMIN || code == UMAX)
5447 {
5448 int unsignedp = (code == UMIN || code == UMAX);
5449
5450 if (code == SMAX || code == UMAX)
5451 cond = reverse_condition (cond);
5452
5453 switch (cond)
5454 {
5455 case GE: case GT:
5456 return unsignedp ? x : XEXP (x, 1);
5457 case LE: case LT:
5458 return unsignedp ? x : XEXP (x, 0);
5459 case GEU: case GTU:
5460 return unsignedp ? XEXP (x, 1) : x;
5461 case LEU: case LTU:
5462 return unsignedp ? XEXP (x, 0) : x;
5463 }
5464 }
5465 }
5466 }
5467
5468 fmt = GET_RTX_FORMAT (code);
5469 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5470 {
5471 if (fmt[i] == 'e')
5472 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
5473 else if (fmt[i] == 'E')
5474 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5475 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
5476 cond, reg, val));
5477 }
5478
5479 return x;
5480 }
5481 \f
5482 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
5483 Return that assignment if so.
5484
5485 We only handle the most common cases. */
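/* For example (illustrative): the one-bit set
(set D (ior (ashift (const_int 1) P) D))
is recognized below and rewritten as
(set (zero_extract D (const_int 1) P) (const_int 1))
so that it can match a bit-field insertion pattern. */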
5486
5487 static rtx
5488 make_field_assignment (x)
5489 rtx x;
5490 {
5491 rtx dest = SET_DEST (x);
5492 rtx src = SET_SRC (x);
5493 rtx ourdest;
5494 rtx assign;
5495 HOST_WIDE_INT c1;
5496 int pos, len;
5497 rtx other;
5498 enum machine_mode mode;
5499
5500 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
5501 a clear of a one-bit field. We will have changed it to
5502 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
5503 for a SUBREG. */
5504
5505 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
5506 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
5507 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
5508 && (rtx_equal_p (dest, XEXP (src, 1))
5509 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5510 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5511 {
5512 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
5513 1, 1, 1, 0);
5514 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5515 }
5516
5517 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
5518 && subreg_lowpart_p (XEXP (src, 0))
5519 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
5520 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
5521 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
5522 && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
5523 && (rtx_equal_p (dest, XEXP (src, 1))
5524 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5525 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5526 {
5527 assign = make_extraction (VOIDmode, dest, 0,
5528 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
5529 1, 1, 1, 0);
5530 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5531 }
5532
5533 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
5534 one-bit field. */
5535 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
5536 && XEXP (XEXP (src, 0), 0) == const1_rtx
5537 && (rtx_equal_p (dest, XEXP (src, 1))
5538 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5539 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5540 {
5541 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
5542 1, 1, 1, 0);
5543 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
5544 }
5545
5546 /* The other case we handle is assignments into a constant-position
5547 field. They look like (ior (and DEST C1) OTHER). If C1 represents
5548 a mask that has all one bits except for a group of zero bits and
5549 OTHER is known to have zeros where C1 has ones, this is such an
5550 assignment. Compute the position and length from C1. Shift OTHER
5551 to the appropriate position, force it to the required mode, and
5552 make the extraction. Check for the AND in both operands. */
5553
5554 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
5555 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
5556 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
5557 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
5558 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 0)), dest)))
5559 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
5560 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
5561 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
5562 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
5563 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
5564 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
5565 dest)))
5566 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
5567 else
5568 return x;
5569
5570 pos = get_pos_from_mask (~c1, &len);
5571 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
5572 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
5573 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
5574 return x;
5575
5576 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
5577
5578 /* The mode to use for the source is the mode of the assignment, or of
5579 what is inside a possible STRICT_LOW_PART. */
5580 mode = (GET_CODE (assign) == STRICT_LOW_PART
5581 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
5582
5583 /* Shift OTHER right POS places and make it the source, restricting it
5584 to the proper length and mode. */
5585
5586 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
5587 GET_MODE (src), other, pos),
5588 mode, len, dest);
5589
5590 return gen_rtx_combine (SET, VOIDmode, assign, src);
5591 }
5592 \f
5593 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5594 if so. */
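/* For example (illustrative): (ior (and A C) (and B C)) becomes
(and (ior A B) C), and (plus (mult A C) (mult B C)) becomes
(mult (plus A B) C). A PLUS whose operands are right shifts is
left alone, since shifts do not distribute over addition. */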
5595
5596 static rtx
5597 apply_distributive_law (x)
5598 rtx x;
5599 {
5600 enum rtx_code code = GET_CODE (x);
5601 rtx lhs, rhs, other;
5602 rtx tem;
5603 enum rtx_code inner_code;
5604
5605 /* Distributivity is not true for floating point.
5606 It can change the value. So don't do it.
5607 -- rms and moshier@world.std.com. */
5608 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5609 return x;
5610
5611 /* The outer operation can only be one of the following: */
5612 if (code != IOR && code != AND && code != XOR
5613 && code != PLUS && code != MINUS)
5614 return x;
5615
5616 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
5617
5618 /* If either operand is a primitive we can't do anything, so get out fast. */
5619 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
5620 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
5621 return x;
5622
5623 lhs = expand_compound_operation (lhs);
5624 rhs = expand_compound_operation (rhs);
5625 inner_code = GET_CODE (lhs);
5626 if (inner_code != GET_CODE (rhs))
5627 return x;
5628
5629 /* See if the inner and outer operations distribute. */
5630 switch (inner_code)
5631 {
5632 case LSHIFTRT:
5633 case ASHIFTRT:
5634 case AND:
5635 case IOR:
5636 /* These all distribute except over PLUS. */
5637 if (code == PLUS || code == MINUS)
5638 return x;
5639 break;
5640
5641 case MULT:
5642 if (code != PLUS && code != MINUS)
5643 return x;
5644 break;
5645
5646 case ASHIFT:
5647 case LSHIFT:
5648 /* These are also multiplies, so they distribute over everything. */
5649 break;
5650
5651 case SUBREG:
5652 /* Non-paradoxical SUBREGs distribute over all operations, provided
5653 the inner modes and word numbers are the same, this is an extraction
5654 of a low-order part, we don't convert an fp operation to int or
5655 vice versa, and we would not be converting a single-word
5656 operation into a multi-word operation. The latter test is not
5657 required, but it prevents generating unneeded multi-word operations.
5658 Some of the previous tests are redundant given the latter test, but
5659 are retained because they are required for correctness.
5660
5661 We produce the result slightly differently in this case. */
5662
5663 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
5664 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
5665 || ! subreg_lowpart_p (lhs)
5666 || (GET_MODE_CLASS (GET_MODE (lhs))
5667 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
5668 || (GET_MODE_SIZE (GET_MODE (lhs))
5669 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
5670 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
5671 return x;
5672
5673 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
5674 SUBREG_REG (lhs), SUBREG_REG (rhs));
5675 return gen_lowpart_for_combine (GET_MODE (x), tem);
5676
5677 default:
5678 return x;
5679 }
5680
5681 /* Set LHS and RHS to the inner operands (A and B in the example
5682 above) and set OTHER to the common operand (C in the example).
5683 There is only one way to do this unless the inner operation is
5684 commutative. */
5685 if (GET_RTX_CLASS (inner_code) == 'c'
5686 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
5687 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
5688 else if (GET_RTX_CLASS (inner_code) == 'c'
5689 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
5690 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
5691 else if (GET_RTX_CLASS (inner_code) == 'c'
5692 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
5693 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
5694 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
5695 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
5696 else
5697 return x;
5698
5699 /* Form the new inner operation, seeing if it simplifies first. */
5700 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
5701
5702 /* There is one exception to the general way of distributing:
5703 (a | b) ^ (a | c) -> (~a) & (b ^ c) */
5704 if (code == XOR && inner_code == IOR)
5705 {
5706 inner_code = AND;
5707 other = gen_unary (NOT, GET_MODE (x), other);
5708 }
5709
5710 /* We may be able to continue distributing the result, so call
5711 ourselves recursively on the inner operation before forming the
5712 outer operation, which we return. */
5713 return gen_binary (inner_code, GET_MODE (x),
5714 apply_distributive_law (tem), other);
5715 }
5716 \f
5717 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5718 in MODE.
5719
5720 Return an equivalent form, if different from X. Otherwise, return X. If
5721 X is zero, we are to always construct the equivalent form. */
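/* For example (illustrative): in (and (ashift R (const_int 1))
(const_int 1)) the shift guarantees that the low-order bit is zero,
so CONSTOP & NONZERO below comes out 0 and the result is
(const_int 0). Conversely, if CONSTOP covers every bit that can be
nonzero, the AND is redundant and VAROP alone is returned. */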
5722
5723 static rtx
5724 simplify_and_const_int (x, mode, varop, constop)
5725 rtx x;
5726 enum machine_mode mode;
5727 rtx varop;
5728 unsigned HOST_WIDE_INT constop;
5729 {
5730 register enum machine_mode tmode;
5731 register rtx temp;
5732 unsigned HOST_WIDE_INT nonzero;
5733
5734 /* There is a large class of optimizations based on the principle that
5735 some operations produce results where certain bits are known to be zero,
5736 and hence are not significant to the AND. For example, if we have just
5737 done a left shift of one bit, the low-order bit is known to be zero and
5738 hence an AND with a mask of ~1 would not do anything.
5739
5740 At the end of the following loop, we set:
5741
5742 VAROP to be the item to be AND'ed with;
5743 CONSTOP to the constant value to AND it with. */
5744
5745 while (1)
5746 {
5747 /* If we ever encounter a mode wider than the host machine's widest
5748 integer size, we can't compute the masks accurately, so give up. */
5749 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
5750 break;
5751
5752 /* Unless one of the cases below does a `continue',
5753 a `break' will be executed to exit the loop. */
5754
5755 switch (GET_CODE (varop))
5756 {
5757 case CLOBBER:
5758 /* If VAROP is a (clobber (const_int)), return it since we know
5759 we are generating something that won't match. */
5760 return varop;
5761
5762 #if ! BITS_BIG_ENDIAN
5763 case USE:
5764 /* VAROP is a (use (mem ..)) that was made from a bit-field
5765 extraction that spanned the boundary of the MEM. If we are
5766 now masking so it is within that boundary, we don't need the
5767 USE any more. */
5768 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
5769 {
5770 varop = XEXP (varop, 0);
5771 continue;
5772 }
5773 break;
5774 #endif
5775
5776 case SUBREG:
5777 if (subreg_lowpart_p (varop)
5778 /* We can ignore the effect of this SUBREG if it narrows the mode
5779 or, on machines where byte operations extend, if the
5780 constant masks to zero all the bits the mode doesn't have. */
5781 && ((GET_MODE_SIZE (GET_MODE (varop))
5782 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
5783 #ifdef BYTE_LOADS_EXTEND
5784 || (0 == (constop
5785 & GET_MODE_MASK (GET_MODE (varop))
5786 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
5787 #endif
5788 ))
5789 {
5790 varop = SUBREG_REG (varop);
5791 continue;
5792 }
5793 break;
5794
5795 case ZERO_EXTRACT:
5796 case SIGN_EXTRACT:
5797 case ZERO_EXTEND:
5798 case SIGN_EXTEND:
5799 /* Try to expand these into a series of shifts and then work
5800 with that result. If we can't, for example, if the extract
5801 isn't at a fixed position, give up. */
5802 temp = expand_compound_operation (varop);
5803 if (temp != varop)
5804 {
5805 varop = temp;
5806 continue;
5807 }
5808 break;
5809
5810 case AND:
5811 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
5812 {
5813 constop &= INTVAL (XEXP (varop, 1));
5814 varop = XEXP (varop, 0);
5815 continue;
5816 }
5817 break;
5818
5819 case IOR:
5820 case XOR:
5821 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5822 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5823 operation which may be a bitfield extraction. */
5824
5825 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5826 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5827 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5828 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
5829 && GET_CODE (XEXP (varop, 1)) == CONST_INT
5830 && ((INTVAL (XEXP (varop, 1))
5831 & ~ nonzero_bits (XEXP (varop, 0), GET_MODE (varop))) == 0))
5832 {
5833 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
5834 << INTVAL (XEXP (XEXP (varop, 0), 1)));
5835 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
5836 XEXP (XEXP (varop, 0), 0), temp);
5837 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5838 temp, XEXP (varop, 1));
5839 continue;
5840 }
5841
5842 /* Apply the AND to both branches of the IOR or XOR, then try to
5843 apply the distributive law. This may eliminate operations
5844 if either branch can be simplified because of the AND.
5845 It may also make some cases more complex, but those cases
5846 probably won't match a pattern either with or without this. */
5847 return
5848 gen_lowpart_for_combine
5849 (mode, apply_distributive_law
5850 (gen_rtx_combine
5851 (GET_CODE (varop), GET_MODE (varop),
5852 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5853 XEXP (varop, 0), constop),
5854 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5855 XEXP (varop, 1), constop))));
5856
5857 case NOT:
5858 /* (and (not FOO) CONSTOP) is (and (xor FOO CONSTOP) CONSTOP), so if
5859 FOO is an LSHIFTRT we can do the same as above. */
5860
5861 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5862 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5863 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5864 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5865 {
5866 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
5867 temp = gen_binary (XOR, GET_MODE (varop),
5868 XEXP (XEXP (varop, 0), 0), temp);
5869 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5870 temp, XEXP (XEXP (varop, 0), 1));
5871 continue;
5872 }
5873 break;
5874
5875 case ASHIFTRT:
5876 /* If we are just looking for the sign bit, we don't need this
5877 shift at all, even if it has a variable count. */
5878 if (constop == ((HOST_WIDE_INT) 1
5879 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
5880 {
5881 varop = XEXP (varop, 0);
5882 continue;
5883 }
5884
5885 /* If this is a shift by a constant, get a mask that contains
5886 those bits that are not copies of the sign bit. We then have
5887 two cases: If CONSTOP only includes those bits, this can be
5888 a logical shift, which may allow simplifications. If CONSTOP
5889 is a single-bit field not within those bits, we are requesting
5890 a copy of the sign bit and hence can shift the sign bit to
5891 the appropriate location. */
5892 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5893 && INTVAL (XEXP (varop, 1)) >= 0
5894 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
5895 {
5896 int i = -1;
5897
5898 nonzero = GET_MODE_MASK (GET_MODE (varop));
5899 nonzero >>= INTVAL (XEXP (varop, 1));
5900
5901 if ((constop & ~ nonzero) == 0
5902 || (i = exact_log2 (constop)) >= 0)
5903 {
5904 varop = simplify_shift_const
5905 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5906 i < 0 ? INTVAL (XEXP (varop, 1))
5907 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
5908 if (GET_CODE (varop) != ASHIFTRT)
5909 continue;
5910 }
5911 }
5912
5913 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
5914 even if the shift count isn't a constant. */
5915 if (constop == 1)
5916 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5917 XEXP (varop, 0), XEXP (varop, 1));
5918 break;
5919
5920 case LSHIFTRT:
5921 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
5922 shift and AND produces only copies of the sign bit (C2 is one less
5923 than a power of two), we can do this with just a shift. */
5924
5925 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5926 && ((INTVAL (XEXP (varop, 1))
5927 + num_sign_bit_copies (XEXP (varop, 0),
5928 GET_MODE (XEXP (varop, 0))))
5929 >= GET_MODE_BITSIZE (GET_MODE (varop)))
5930 && exact_log2 (constop + 1) >= 0)
5931 varop
5932 = gen_rtx_combine (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5933 GEN_INT (GET_MODE_BITSIZE (GET_MODE (varop))
5934 - exact_log2 (constop + 1)));
5935 break;
5936
5937 case NE:
5938 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5939 included in STORE_FLAG_VALUE and FOO has no bits that might be
5940 nonzero not in CONST. */
5941 if ((constop & ~ STORE_FLAG_VALUE) == 0
5942 && XEXP (varop, 1) == const0_rtx
5943 && (nonzero_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5944 {
5945 varop = XEXP (varop, 0);
5946 continue;
5947 }
5948 break;
5949
5950 case PLUS:
5951 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5952 low-order bits (as in an alignment operation) and FOO is already
5953 aligned to that boundary, we can remove this AND
5954 and possibly the PLUS if it is now adding zero. */
5955 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5956 && exact_log2 (-constop) >= 0
5957 && (nonzero_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5958 {
5959 varop = plus_constant (XEXP (varop, 0),
5960 INTVAL (XEXP (varop, 1)) & constop);
5961 constop = ~0;
5962 break;
5963 }
5964
5965 /* ... fall through ... */
5966
5967 case MINUS:
5968 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
5969 less than powers of two and M2 is narrower than M1, we can
5970 eliminate the inner AND. This occurs when incrementing
5971 bit fields. */
5972
5973 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
5974 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
5975 SUBST (XEXP (varop, 0),
5976 expand_compound_operation (XEXP (varop, 0)));
5977
5978 if (GET_CODE (XEXP (varop, 0)) == AND
5979 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5980 && exact_log2 (constop + 1) >= 0
5981 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
5982 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
5983 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
5984 break;
5985 }
5986
5987 break;
5988 }
5989
5990 /* If we have reached a constant, this whole thing is constant. */
5991 if (GET_CODE (varop) == CONST_INT)
5992 return GEN_INT (constop & INTVAL (varop));
5993
5994 /* See what bits may be nonzero in VAROP. Unlike the general case of
5995 a call to nonzero_bits, here we don't care about bits outside
5996 MODE. */
5997
5998 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
5999
6000 /* Turn off all bits in the constant that are known to already be zero.
6001 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO,
6002 which is tested below. */
6003
6004 constop &= nonzero;
6005
6006 /* If we don't have any bits left, return zero. */
6007 if (constop == 0)
6008 return const0_rtx;
6009
6010 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6011 if we already had one (just check for the simplest cases). */
6012 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6013 && GET_MODE (XEXP (x, 0)) == mode
6014 && SUBREG_REG (XEXP (x, 0)) == varop)
6015 varop = XEXP (x, 0);
6016 else
6017 varop = gen_lowpart_for_combine (mode, varop);
6018
6019 /* If we can't make the SUBREG, try to return what we were given. */
6020 if (GET_CODE (varop) == CLOBBER)
6021 return x ? x : varop;
6022
6023 /* If we are only masking insignificant bits, return VAROP. */
6024 if (constop == nonzero)
6025 x = varop;
6026
6027 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6028 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6029 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
6030
6031 else
6032 {
6033 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6034 || INTVAL (XEXP (x, 1)) != constop)
6035 SUBST (XEXP (x, 1), GEN_INT (constop));
6036
6037 SUBST (XEXP (x, 0), varop);
6038 }
6039
6040 return x;
6041 }
6042 \f
6043 /* Given an expression, X, compute which bits in X can be non-zero.
6044 We don't care about bits outside of those defined in MODE.
6045
6046 For most X this is simply GET_MODE_MASK (MODE), but if X is
6047 a shift, AND, or zero_extract, we can do better. */
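/* For example (illustrative): nonzero_bits of
(and R (const_int 240)) is at most 0xF0, and, on a 32-bit target,
nonzero_bits of (lshiftrt:SI R (const_int 28)) is at most 0xF,
since the shift clears the 28 high-order bits. */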
6048
6049 static unsigned HOST_WIDE_INT
6050 nonzero_bits (x, mode)
6051 rtx x;
6052 enum machine_mode mode;
6053 {
6054 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6055 unsigned HOST_WIDE_INT inner_nz;
6056 enum rtx_code code;
6057 int mode_width = GET_MODE_BITSIZE (mode);
6058 rtx tem;
6059
6060 /* If X is wider than MODE, use its mode instead. */
6061 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6062 {
6063 mode = GET_MODE (x);
6064 nonzero = GET_MODE_MASK (mode);
6065 mode_width = GET_MODE_BITSIZE (mode);
6066 }
6067
6068 if (mode_width > HOST_BITS_PER_WIDE_INT)
6069 /* Our only callers in this case look for single bit values. So
6070 just return the mode mask. Those tests will then be false. */
6071 return nonzero;
6072
6073 code = GET_CODE (x);
6074 switch (code)
6075 {
6076 case REG:
6077 #ifdef STACK_BOUNDARY
6078 /* If this is the stack pointer, we may know something about its
6079 alignment. If PUSH_ROUNDING is defined, it is possible for the
6080 stack to be momentarily aligned only to that amount, so we pick
6081 the least alignment. */
6082
6083 if (x == stack_pointer_rtx)
6084 {
6085 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6086
6087 #ifdef PUSH_ROUNDING
6088 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6089 #endif
6090
6091 return nonzero & ~ (sp_alignment - 1);
6092 }
6093 #endif
6094
6095 /* If X is a register whose value we can find, use that value.
6096 Otherwise, use the previously-computed nonzero bits for this
6097 register. */
6098
6099 tem = get_last_value (x);
6100 if (tem)
6101 return nonzero_bits (tem, mode);
6102 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6103 return reg_nonzero_bits[REGNO (x)] & nonzero;
6104 else
6105 return nonzero;
6106
6107 case CONST_INT:
6108 return INTVAL (x);
6109
6110 #ifdef BYTE_LOADS_ZERO_EXTEND
6111 case MEM:
6112 /* In many, if not most, RISC machines, reading a byte from memory
6113 zeros the rest of the register. Noticing that fact saves a lot
6114 of extra zero-extends. */
6115 nonzero &= GET_MODE_MASK (GET_MODE (x));
6116 break;
6117 #endif
6118
6119 #if STORE_FLAG_VALUE == 1
6120 case EQ: case NE:
6121 case GT: case GTU:
6122 case LT: case LTU:
6123 case GE: case GEU:
6124 case LE: case LEU:
6125
6126 if (GET_MODE_CLASS (mode) == MODE_INT)
6127 nonzero = 1;
6128
6129 /* A comparison operation only sets the bits given by its mode. The
6130 rest are undefined. */
6131 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6132 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6133 break;
6134 #endif
6135
6136 case NEG:
6137 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6138 == GET_MODE_BITSIZE (GET_MODE (x)))
6139 nonzero = 1;
6140
6141 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6142 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6143 break;
6144
6145 case ABS:
6146 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6147 == GET_MODE_BITSIZE (GET_MODE (x)))
6148 nonzero = 1;
6149 break;
6150
6151 case TRUNCATE:
6152 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6153 break;
6154
6155 case ZERO_EXTEND:
6156 nonzero &= nonzero_bits (XEXP (x, 0), mode);
6157 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6158 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6159 break;
6160
6161 case SIGN_EXTEND:
6162 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6163 Otherwise, show that all the bits in the outer mode but not in the
6164 inner mode may be non-zero. */
6165 inner_nz = nonzero_bits (XEXP (x, 0), mode);
6166 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6167 {
6168 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6169 if (inner_nz &
6170 (((HOST_WIDE_INT) 1
6171 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6172 inner_nz |= (GET_MODE_MASK (mode)
6173 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6174 }
6175
6176 nonzero &= inner_nz;
6177 break;
6178
6179 case AND:
6180 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6181 & nonzero_bits (XEXP (x, 1), mode));
6182 break;
6183
6184 case XOR: case IOR:
6185 case UMIN: case UMAX: case SMIN: case SMAX:
6186 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6187 | nonzero_bits (XEXP (x, 1), mode));
6188 break;
6189
6190 case PLUS: case MINUS:
6191 case MULT:
6192 case DIV: case UDIV:
6193 case MOD: case UMOD:
6194 /* We can apply the rules of arithmetic to compute the number of
6195 high- and low-order zero bits of these operations. We start by
6196 computing the width (position of the highest-order non-zero bit)
6197 and the number of low-order zero bits for each value. */
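/* Worked example (illustrative): if NZ0 == 0x0C (width 4, two
low-order zeros) and NZ1 == 0x30 (width 6, four low-order zeros),
then for PLUS the sum has width at most MAX (4, 6) + 1 == 7 and at
least MIN (2, 4) == 2 low-order zeros, so NONZERO is cut down to
0x7C. */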
6198 {
6199 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6200 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6201 int width0 = floor_log2 (nz0) + 1;
6202 int width1 = floor_log2 (nz1) + 1;
6203 int low0 = floor_log2 (nz0 & -nz0);
6204 int low1 = floor_log2 (nz1 & -nz1);
6205 int op0_maybe_minusp = ((nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
6206 int op1_maybe_minusp = ((nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
6207 int result_width = mode_width;
6208 int result_low = 0;
6209
6210 switch (code)
6211 {
6212 case PLUS:
6213 result_width = MAX (width0, width1) + 1;
6214 result_low = MIN (low0, low1);
6215 break;
6216 case MINUS:
6217 result_low = MIN (low0, low1);
6218 break;
6219 case MULT:
6220 result_width = width0 + width1;
6221 result_low = low0 + low1;
6222 break;
6223 case DIV:
6224 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6225 result_width = width0;
6226 break;
6227 case UDIV:
6228 result_width = width0;
6229 break;
6230 case MOD:
6231 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6232 result_width = MIN (width0, width1);
6233 result_low = MIN (low0, low1);
6234 break;
6235 case UMOD:
6236 result_width = MIN (width0, width1);
6237 result_low = MIN (low0, low1);
6238 break;
6239 }
6240
6241 if (result_width < mode_width)
6242 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6243
6244 if (result_low > 0)
6245 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6246 }
6247 break;
6248
6249 case ZERO_EXTRACT:
6250 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6251 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6252 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6253 break;
6254
6255 case SUBREG:
6256 /* If this is a SUBREG formed for a promoted variable that has
6257 been zero-extended, we know that at least the high-order bits
6258 are zero, though others might be too. */
6259
6260 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6261 nonzero = (GET_MODE_MASK (GET_MODE (x))
6262 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
6263
6264 /* If the inner mode is a single word for both the host and target
6265 machines, we can compute this from which bits of the inner
6266 object might be nonzero. */
6267 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
6268 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6269 <= HOST_BITS_PER_WIDE_INT))
6270 {
6271 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
6272 #ifndef BYTE_LOADS_EXTEND
6273 /* On many CISC machines, accessing an object in a wider mode
6274 causes the high-order bits to become undefined. So they are
6275 not known to be zero. */
6276 if (GET_MODE_SIZE (GET_MODE (x))
6277 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6278 nonzero |= (GET_MODE_MASK (GET_MODE (x))
6279 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
6280 #endif
6281 }
6282 break;
6283
6284 case ASHIFTRT:
6285 case LSHIFTRT:
6286 case ASHIFT:
6287 case LSHIFT:
6288 case ROTATE:
6289 /* The nonzero bits are in two classes: any bits within MODE
6290 that aren't in GET_MODE (x) are always significant. The rest of the
6291 nonzero bits are those that are significant in the operand of
6292 the shift when shifted the appropriate number of bits. This
6293 shows that high-order bits are cleared by the right shift and
6294 low-order bits by left shifts. */
6295 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6296 && INTVAL (XEXP (x, 1)) >= 0
6297 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6298 {
6299 enum machine_mode inner_mode = GET_MODE (x);
6300 int width = GET_MODE_BITSIZE (inner_mode);
6301 int count = INTVAL (XEXP (x, 1));
6302 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6303 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
6304 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
6305 unsigned HOST_WIDE_INT outer = 0;
6306
6307 if (mode_width > width)
6308 outer = (op_nonzero & nonzero & ~ mode_mask);
6309
6310 if (code == LSHIFTRT)
6311 inner >>= count;
6312 else if (code == ASHIFTRT)
6313 {
6314 inner >>= count;
6315
6316 /* If the sign bit may have been nonzero before the shift, we
6317 need to mark all the places it could have been copied to
6318 by the shift as possibly nonzero. */
6319 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6320 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
6321 }
6322 else if (code == LSHIFT || code == ASHIFT)
6323 inner <<= count;
6324 else
6325 inner = ((inner << (count % width)
6326 | (inner >> (width - (count % width)))) & mode_mask);
6327
6328 nonzero &= (outer | inner);
6329 }
6330 break;
6331
6332 case FFS:
6333 /* This is at most the number of bits in the mode. */
6334 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
6335 break;
6336
6337 case IF_THEN_ELSE:
6338 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
6339 | nonzero_bits (XEXP (x, 2), mode));
6340 break;
6341 }
6342
6343 return nonzero;
6344 }
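/* Illustrative sketch, not part of the original combine.c: the
   ASHIFTRT rule above, redone in plain host arithmetic for a
   hypothetical 8-bit mode.  Guarded with #if 0 so it cannot affect
   the build.  */
#if 0
static unsigned int
example_ashiftrt_nonzero (inner, count)
     unsigned int inner;	/* possibly nonzero bits of the operand */
     int count;			/* shift count, 0 <= count < 8 */
{
  int width = 8;

  inner >>= count;

  /* The operand's sign bit (bit 7) now sits at bit 7 - count.  If it
     may be nonzero, every bit the shift copied it into may be too.  */
  if (inner & (1 << (width - 1 - count)))
    inner |= ((1 << count) - 1) << (width - count);

  return inner;
}
#endif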
6345 \f
6346 /* Return the number of bits at the high-order end of X that are known to
6347 be equal to the sign bit. This number will always be between 1 and
6348 the number of bits in the mode of X. MODE is the mode to be used
6349 if X is VOIDmode. */
6350
6351 static int
6352 num_sign_bit_copies (x, mode)
6353 rtx x;
6354 enum machine_mode mode;
6355 {
6356 enum rtx_code code = GET_CODE (x);
6357 int bitwidth;
6358 int num0, num1, result;
6359 unsigned HOST_WIDE_INT nonzero;
6360 rtx tem;
6361
6362 /* If we weren't given a mode, use the mode of X. If the mode is still
6363 VOIDmode, we don't know anything. */
6364
6365 if (mode == VOIDmode)
6366 mode = GET_MODE (x);
6367
6368 if (mode == VOIDmode)
6369 return 1;
6370
6371 bitwidth = GET_MODE_BITSIZE (mode);
6372
6373 switch (code)
6374 {
6375 case REG:
6376 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6377 return reg_sign_bit_copies[REGNO (x)];
6378
6379 tem = get_last_value (x);
6380 if (tem != 0)
6381 return num_sign_bit_copies (tem, mode);
6382 break;
6383
6384 #ifdef BYTE_LOADS_SIGN_EXTEND
6385 case MEM:
6386 /* Some RISC machines sign-extend all loads smaller than a word. */
6387 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
6388 #endif
6389
6390 case CONST_INT:
6391 /* If the constant is negative, take its 1's complement and remask.
6392 Then see how many high-order zero bits we have. */
6393 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
6394 if (bitwidth <= HOST_BITS_PER_WIDE_INT
6395 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6396 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
6397
6398 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
6399
6400 case SUBREG:
6401 /* If this is a SUBREG for a promoted object that is sign-extended
6402 and we are looking at it in a wider mode, we know that at least the
6403 high-order bits are sign bit copies. */
6404
6405 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
6406 return (GET_MODE_BITSIZE (mode) - GET_MODE_BITSIZE (GET_MODE (x))
6407 + num_sign_bit_copies (SUBREG_REG (x), GET_MODE (x)));
6408
6409 /* For a smaller object, just ignore the high bits. */
6410 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
6411 {
6412 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
6413 return MAX (1, (num0
6414 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6415 - bitwidth)));
6416 }
6417
6418 #ifdef BYTE_LOADS_EXTEND
6419 /* For paradoxical SUBREGs, just look inside since, on machines with
6420 one of these defined, we assume that operations are actually
6421 performed on the full register. Note that we are passing MODE
6422 to the recursive call, so the number of sign bit copies will
6423 remain relative to that mode, not the inner mode. */
6424
6425 if (GET_MODE_SIZE (GET_MODE (x))
6426 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6427 return num_sign_bit_copies (SUBREG_REG (x), mode);
6428 #endif
6429
6430 break;
6431
6432 case SIGN_EXTRACT:
6433 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6434 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
6435 break;
6436
6437 case SIGN_EXTEND:
6438 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6439 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
6440
6441 case TRUNCATE:
6442 /* For a smaller object, just ignore the high bits. */
6443 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
6444 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6445 - bitwidth)));
6446
6447 case NOT:
6448 return num_sign_bit_copies (XEXP (x, 0), mode);
6449
6450 case ROTATE: case ROTATERT:
6451 /* If we are rotating left by a number of bits less than the number
6452 of sign bit copies, we can just subtract that amount from the
6453 number. */
6454 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6455 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
6456 {
6457 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6458 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
6459 : bitwidth - INTVAL (XEXP (x, 1))));
6460 }
6461 break;
6462
6463 case NEG:
6464 /* In general, this subtracts one sign bit copy. But if the value
6465 is known to be positive, the number of sign bit copies is the
6466 same as that of the input. Finally, if the input has just one bit
6467 that might be nonzero, all the bits are copies of the sign bit. */
6468 nonzero = nonzero_bits (XEXP (x, 0), mode);
6469 if (nonzero == 1)
6470 return bitwidth;
6471
6472 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6473 if (num0 > 1
6474 && bitwidth <= HOST_BITS_PER_WIDE_INT
6475 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
6476 num0--;
6477
6478 return num0;
6479
6480 case IOR: case AND: case XOR:
6481 case SMIN: case SMAX: case UMIN: case UMAX:
6482 /* Logical operations will preserve the number of sign-bit copies.
6483 MIN and MAX operations always return one of the operands. */
6484 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6485 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6486 return MIN (num0, num1);
6487
6488 case PLUS: case MINUS:
6489 /* For addition and subtraction, we can have a 1-bit carry. However,
6490 if we are subtracting 1 from a positive number, there will not
6491 be such a carry. Furthermore, if the positive number is known to
6492 be 0 or 1, we know the result is either -1 or 0. */
6493
6494 if (code == PLUS && XEXP (x, 1) == constm1_rtx
6495 && bitwidth <= HOST_BITS_PER_WIDE_INT)
6496 {
6497 nonzero = nonzero_bits (XEXP (x, 0), mode);
6498 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
6499 return (nonzero == 1 || nonzero == 0 ? bitwidth
6500 : bitwidth - floor_log2 (nonzero) - 1);
6501 }
6502
6503 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6504 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6505 return MAX (1, MIN (num0, num1) - 1);
6506
6507 case MULT:
6508 /* The number of bits of the product is the sum of the number of
6509 bits of both terms. However, unless one of the terms is known
6510 to be positive, we must allow for an additional bit since negating
6511 a negative number can remove one sign bit copy. */
6512
6513 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6514 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6515
6516 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
6517 if (result > 0
6518 && bitwidth <= HOST_BITS_PER_WIDE_INT
6519 && ((nonzero_bits (XEXP (x, 0), mode)
6520 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6521 && ((nonzero_bits (XEXP (x, 1), mode)
6522 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
6523 result--;
6524
6525 return MAX (1, result);
6526
6527 case UDIV:
6528 /* The result must be <= the first operand. */
6529 return num_sign_bit_copies (XEXP (x, 0), mode);
6530
6531 case UMOD:
6532 /* The result must be <= the second operand. */
6533 return num_sign_bit_copies (XEXP (x, 1), mode);
6534
6535 case DIV:
6536 /* Similar to unsigned division, except that we have to worry about
6537 the case where the divisor is negative, in which case the
6538 quotient may need one more bit, so we drop one sign bit copy. */
6539 result = num_sign_bit_copies (XEXP (x, 0), mode);
6540 if (result > 1
6541 && bitwidth <= HOST_BITS_PER_WIDE_INT
6542 && (nonzero_bits (XEXP (x, 1), mode)
6543 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6544 result--;
6545
6546 return result;
6547
6548 case MOD:
6549 result = num_sign_bit_copies (XEXP (x, 1), mode);
6550 if (result > 1
6551 && bitwidth <= HOST_BITS_PER_WIDE_INT
6552 && (nonzero_bits (XEXP (x, 1), mode)
6553 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6554 result--;
6555
6556 return result;
6557
6558 case ASHIFTRT:
6559 /* Shifts by a constant add to the number of bits equal to the
6560 sign bit. */
6561 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6562 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6563 && INTVAL (XEXP (x, 1)) > 0)
6564 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
6565
6566 return num0;
6567
6568 case ASHIFT:
6569 case LSHIFT:
6570 /* Left shifts destroy copies. */
6571 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6572 || INTVAL (XEXP (x, 1)) < 0
6573 || INTVAL (XEXP (x, 1)) >= bitwidth)
6574 return 1;
6575
6576 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6577 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
6578
6579 case IF_THEN_ELSE:
6580 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
6581 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
6582 return MIN (num0, num1);
6583
6584 #if STORE_FLAG_VALUE == -1
6585 case EQ: case NE: case GE: case GT: case LE: case LT:
6586 case GEU: case GTU: case LEU: case LTU:
6587 return bitwidth;
6588 #endif
6589 }
6590
6591 /* If we haven't been able to figure it out by one of the above rules,
6592 see if some of the high-order bits are known to be zero. If so,
6593 count those bits and return one less than that amount. If we can't
6594 safely compute the mask for this mode, always return BITWIDTH. */
6595
6596 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6597 return 1;
6598
6599 nonzero = nonzero_bits (x, mode);
6600 return (nonzero == GET_MODE_MASK (mode)
6601 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
6602 }
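/* Illustrative sketch, not part of the original combine.c, of the
   CONST_INT rule above for a hypothetical 8-bit mode.  E.g. -4 masks
   to 0xfc; its complement remasks to 0x03; floor_log2 (3) is 1; so
   8 - 1 - 1 = 6 high-order bits are copies of the sign bit (indeed
   0xfc is 11111100).  Guarded with #if 0 so it cannot affect the
   build.  */
#if 0
static int
example_const_sign_bit_copies (value)
     int value;
{
  int bitwidth = 8;
  unsigned int nonzero = value & 0xff;

  if (nonzero & 0x80)
    nonzero = (~ nonzero) & 0xff;

  return nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1;
}
#endif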
6603 \f
6604 /* Return the number of "extended" bits there are in X, when interpreted
6605 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
6606 unsigned quantities, this is the number of high-order zero bits.
6607 For signed quantities, this is the number of copies of the sign bit
6608 minus 1. In both cases, this function returns the number of "spare"
6609 bits. For example, if two quantities for which this function returns
6610 at least 1 are added, the addition is known not to overflow.
6611
6612 This function will always return 0 unless called during combine, which
6613 implies that it must be called from a define_split. */
6614
6615 int
6616 extended_count (x, mode, unsignedp)
6617 rtx x;
6618 enum machine_mode mode;
6619 int unsignedp;
6620 {
6621 if (nonzero_sign_valid == 0)
6622 return 0;
6623
6624 return (unsignedp
6625 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6626 && (GET_MODE_BITSIZE (mode) - 1
6627 - floor_log2 (nonzero_bits (x, mode))))
6628 : num_sign_bit_copies (x, mode) - 1);
6629 }
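/* Worked example, not part of the original combine.c: the unsigned
   branch above counts high-order zero bits.  For an 8-bit value whose
   nonzero bits are 0x1f, it yields 8 - 1 - floor_log2 (0x1f)
   = 8 - 1 - 4 = 3 spare bits; two such values are each at most 0x1f,
   so their sum is at most 0x3e and cannot overflow eight bits.  */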
6630 \f
6631 /* This function is called from `simplify_shift_const' to merge two
6632 outer operations. Specifically, we have already found that we need
6633 to perform operation *POP0 with constant *PCONST0 at the outermost
6634 position. We would now like to also perform OP1 with constant CONST1
6635 (with *POP0 being done last).
6636
6637 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
6638 the resulting operation. *PCOMP_P is set to 1 if we would need to
6639 complement the innermost operand, otherwise it is unchanged.
6640
6641 MODE is the mode in which the operation will be done. No bits outside
6642 the width of this mode matter. It is assumed that the width of this mode
6643 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
6644
6645 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
6646 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
6647 result is simply *PCONST0.
6648
6649 If the resulting operation cannot be expressed as one operation, we
6650 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
6651
6652 static int
6653 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
6654 enum rtx_code *pop0;
6655 HOST_WIDE_INT *pconst0;
6656 enum rtx_code op1;
6657 HOST_WIDE_INT const1;
6658 enum machine_mode mode;
6659 int *pcomp_p;
6660 {
6661 enum rtx_code op0 = *pop0;
6662 HOST_WIDE_INT const0 = *pconst0;
6663
6664 const0 &= GET_MODE_MASK (mode);
6665 const1 &= GET_MODE_MASK (mode);
6666
6667 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6668 if (op0 == AND)
6669 const1 &= const0;
6670
6671 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6672 if OP0 is SET. */
6673
6674 if (op1 == NIL || op0 == SET)
6675 return 1;
6676
6677 else if (op0 == NIL)
6678 op0 = op1, const0 = const1;
6679
6680 else if (op0 == op1)
6681 {
6682 switch (op0)
6683 {
6684 case AND:
6685 const0 &= const1;
6686 break;
6687 case IOR:
6688 const0 |= const1;
6689 break;
6690 case XOR:
6691 const0 ^= const1;
6692 break;
6693 case PLUS:
6694 const0 += const1;
6695 break;
6696 case NEG:
6697 op0 = NIL;
6698 break;
6699 }
6700 }
6701
6702 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6703 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6704 return 0;
6705
6706 /* If the two constants aren't the same, we can't do anything. The
6707 remaining six cases can all be done. */
6708 else if (const0 != const1)
6709 return 0;
6710
6711 else
6712 switch (op0)
6713 {
6714 case IOR:
6715 if (op1 == AND)
6716 /* (a & b) | b == b */
6717 op0 = SET;
6718 else /* op1 == XOR */
6719 /* (a ^ b) | b == a | b */
6720 ;
6721 break;
6722
6723 case XOR:
6724 if (op1 == AND)
6725 /* (a & b) ^ b == (~a) & b */
6726 op0 = AND, *pcomp_p = 1;
6727 else /* op1 == IOR */
6728 /* (a | b) ^ b == a & ~b */
6729 op0 = AND, *pconst0 = ~ const0;
6730 break;
6731
6732 case AND:
6733 if (op1 == IOR)
6734 /* (a | b) & b == b */
6735 op0 = SET;
6736 else /* op1 == XOR */
6737 /* (a ^ b) & b == (~a) & b */
6738 *pcomp_p = 1;
6739 break;
6740 }
6741
6742 /* Check for NO-OP cases. */
6743 const0 &= GET_MODE_MASK (mode);
6744 if (const0 == 0
6745 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6746 op0 = NIL;
6747 else if (const0 == 0 && op0 == AND)
6748 op0 = SET;
6749 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6750 op0 = NIL;
6751
6752 *pop0 = op0;
6753 *pconst0 = const0;
6754
6755 return 1;
6756 }
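/* Quick numeric check, not part of the original combine.c, of the
   identities used in the switch above, with the arbitrary values
   a == 0x5a and b == 0x0f.  Guarded with #if 0 so it cannot affect
   the build.  */
#if 0
static int
example_merge_identities_hold ()
{
  unsigned int a = 0x5a, b = 0x0f;

  return (((a & b) | b) == b			/* 0x0f */
	  && ((a & b) ^ b) == (~a & b)		/* 0x05 */
	  && ((a | b) ^ b) == (a & ~b)		/* 0x50 */
	  && ((a ^ b) & b) == (~a & b));	/* 0x05 */
}
#endif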
6757 \f
6758 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6759 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6760 that we started with.
6761
6762 The shift is normally computed in the widest mode we find in VAROP, as
6763 long as it isn't a different number of words than RESULT_MODE. Exceptions
6764 are ASHIFTRT and ROTATE, which are always done in their original mode. */
6765
6766 static rtx
6767 simplify_shift_const (x, code, result_mode, varop, count)
6768 rtx x;
6769 enum rtx_code code;
6770 enum machine_mode result_mode;
6771 rtx varop;
6772 int count;
6773 {
6774 enum rtx_code orig_code = code;
6775 int orig_count = count;
6776 enum machine_mode mode = result_mode;
6777 enum machine_mode shift_mode, tmode;
6778 int mode_words
6779 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6780 /* We form (outer_op (code varop count) (outer_const)). */
6781 enum rtx_code outer_op = NIL;
6782 HOST_WIDE_INT outer_const;
6783 rtx const_rtx;
6784 int complement_p = 0;
6785 rtx new;
6786
6787 /* If we were given an invalid count, don't do anything except exactly
6788 what was requested. */
6789
6790 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6791 {
6792 if (x)
6793 return x;
6794
6795 return gen_rtx (code, mode, varop, GEN_INT (count));
6796 }
6797
6798 /* Unless one of the branches of the `if' in this loop does a `continue',
6799 we will `break' the loop after the `if'. */
6800
6801 while (count != 0)
6802 {
6803 /* If we have an operand of (clobber (const_int 0)), just return that
6804 value. */
6805 if (GET_CODE (varop) == CLOBBER)
6806 return varop;
6807
6808 /* If we discovered we had to complement VAROP, leave. Making a NOT
6809 here would cause an infinite loop. */
6810 if (complement_p)
6811 break;
6812
6813 /* Convert ROTATERT to ROTATE. */
6814 if (code == ROTATERT)
6815 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
6816
6817 /* Canonicalize LSHIFT to ASHIFT. */
6818 if (code == LSHIFT)
6819 code = ASHIFT;
6820
6821 /* We need to determine what mode we will do the shift in. If the
6822 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
6823 was originally done in. Otherwise, we can do it in MODE, the widest
6824 mode encountered. */
6825 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6826
6827 /* Handle cases where the count is greater than the size of the mode
6828 minus 1. For ASHIFT, use the size minus one as the count (this can
6829 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6830 take the count modulo the size. For other shifts, the result is
6831 zero.
6832
6833 Since these shifts are being produced by the compiler by combining
6834 multiple operations, each of which are defined, we know what the
6835 result is supposed to be. */
6836
6837 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
6838 {
6839 if (code == ASHIFTRT)
6840 count = GET_MODE_BITSIZE (shift_mode) - 1;
6841 else if (code == ROTATE || code == ROTATERT)
6842 count %= GET_MODE_BITSIZE (shift_mode);
6843 else
6844 {
6845 /* We can't simply return zero because there may be an
6846 outer op. */
6847 varop = const0_rtx;
6848 count = 0;
6849 break;
6850 }
6851 }
6852
6853 /* Negative counts are invalid and should not have been made (a
6854 programmer-specified negative count should have been handled
6855 above). */
6856 else if (count < 0)
6857 abort ();
6858
6859 /* An arithmetic right shift of a quantity known to be -1 or 0
6860 is a no-op. */
6861 if (code == ASHIFTRT
6862 && (num_sign_bit_copies (varop, shift_mode)
6863 == GET_MODE_BITSIZE (shift_mode)))
6864 {
6865 count = 0;
6866 break;
6867 }
6868
6869 /* If we are doing an arithmetic right shift and discarding all but
6870 the sign bit copies, this is equivalent to doing a shift by the
6871 bitsize minus one. Convert it into that shift because it will often
6872 allow other simplifications. */
6873
6874 if (code == ASHIFTRT
6875 && (count + num_sign_bit_copies (varop, shift_mode)
6876 >= GET_MODE_BITSIZE (shift_mode)))
6877 count = GET_MODE_BITSIZE (shift_mode) - 1;
6878
6879 /* We simplify the tests below and elsewhere by converting
6880 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
6881 `make_compound_operation' will convert it to an ASHIFTRT for
6882 those machines (such as the VAX) that don't have an LSHIFTRT. */
6883 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
6884 && code == ASHIFTRT
6885 && ((nonzero_bits (varop, shift_mode)
6886 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
6887 == 0))
6888 code = LSHIFTRT;
6889
6890 switch (GET_CODE (varop))
6891 {
6892 case SIGN_EXTEND:
6893 case ZERO_EXTEND:
6894 case SIGN_EXTRACT:
6895 case ZERO_EXTRACT:
6896 new = expand_compound_operation (varop);
6897 if (new != varop)
6898 {
6899 varop = new;
6900 continue;
6901 }
6902 break;
6903
6904 case MEM:
6905 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
6906 minus the width of a smaller mode, we can do this with a
6907 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
6908 if ((code == ASHIFTRT || code == LSHIFTRT)
6909 && ! mode_dependent_address_p (XEXP (varop, 0))
6910 && ! MEM_VOLATILE_P (varop)
6911 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6912 MODE_INT, 1)) != BLKmode)
6913 {
6914 #if BYTES_BIG_ENDIAN
6915 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
6916 #else
6917 new = gen_rtx (MEM, tmode,
6918 plus_constant (XEXP (varop, 0),
6919 count / BITS_PER_UNIT));
6920 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
6921 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
6922 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
6923 #endif
6924 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6925 : ZERO_EXTEND, mode, new);
6926 count = 0;
6927 continue;
6928 }
6929 break;
6930
6931 case USE:
6932 /* Similar to the case above, except that we can only do this if
6933 the resulting mode is the same as that of the underlying
6934 MEM, and we adjust the address depending on the *bits* endianness
6935 because of the way that bit-field extract insns are defined. */
6936 if ((code == ASHIFTRT || code == LSHIFTRT)
6937 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6938 MODE_INT, 1)) != BLKmode
6939 && tmode == GET_MODE (XEXP (varop, 0)))
6940 {
6941 #if BITS_BIG_ENDIAN
6942 new = XEXP (varop, 0);
6943 #else
6944 new = copy_rtx (XEXP (varop, 0));
6945 SUBST (XEXP (new, 0),
6946 plus_constant (XEXP (new, 0),
6947 count / BITS_PER_UNIT));
6948 #endif
6949
6950 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6951 : ZERO_EXTEND, mode, new);
6952 count = 0;
6953 continue;
6954 }
6955 break;
6956
6957 case SUBREG:
6958 /* If VAROP is a SUBREG, strip it as long as the inner operand has
6959 the same number of words as what we've seen so far. Then store
6960 the widest mode in MODE. */
6961 if (subreg_lowpart_p (varop)
6962 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6963 > GET_MODE_SIZE (GET_MODE (varop)))
6964 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6965 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6966 == mode_words))
6967 {
6968 varop = SUBREG_REG (varop);
6969 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
6970 mode = GET_MODE (varop);
6971 continue;
6972 }
6973 break;
6974
6975 case MULT:
6976 /* Some machines use MULT instead of ASHIFT because MULT
6977 is cheaper. But it is still better on those machines to
6978 merge two shifts into one. */
6979 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6980 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6981 {
6982 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
6983 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6984 continue;
6985 }
6986 break;
6987
6988 case UDIV:
6989 /* Similar, for when divides are cheaper. */
6990 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6991 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6992 {
6993 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6994 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6995 continue;
6996 }
6997 break;
6998
6999 case ASHIFTRT:
7000 /* If we are extracting just the sign bit of an arithmetic right
7001 shift, that shift is not needed. */
7002 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7003 {
7004 varop = XEXP (varop, 0);
7005 continue;
7006 }
7007
7008 /* ... fall through ... */
7009
7010 case LSHIFTRT:
7011 case ASHIFT:
7012 case LSHIFT:
7013 case ROTATE:
7014 /* Here we have two nested shifts. The result is usually the
7015 AND of a new shift with a mask. We compute the result below. */
7016 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7017 && INTVAL (XEXP (varop, 1)) >= 0
7018 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
7019 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7020 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7021 {
7022 enum rtx_code first_code = GET_CODE (varop);
7023 int first_count = INTVAL (XEXP (varop, 1));
7024 unsigned HOST_WIDE_INT mask;
7025 rtx mask_rtx;
7026 rtx inner;
7027
7028 if (first_code == LSHIFT)
7029 first_code = ASHIFT;
7030
7031 /* We have one common special case. We can't do any merging if
7032 the inner code is an ASHIFTRT of a smaller mode. However, if
7033 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7034 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7035 we can convert it to
7036 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
7037 This simplifies certain SIGN_EXTEND operations. */
7038 if (code == ASHIFT && first_code == ASHIFTRT
7039 && (GET_MODE_BITSIZE (result_mode)
7040 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7041 {
7042 /* C3 has the low-order C1 bits zero. */
7043
7044 mask = (GET_MODE_MASK (mode)
7045 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
7046
7047 varop = simplify_and_const_int (NULL_RTX, result_mode,
7048 XEXP (varop, 0), mask);
7049 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
7050 varop, count);
7051 count = first_count;
7052 code = ASHIFTRT;
7053 continue;
7054 }
7055
7056 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7057 than C1 high-order bits equal to the sign bit, we can convert
7058 this to either an ASHIFT or a ASHIFTRT depending on the
7059 two counts.
7060
7061 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7062
7063 if (code == ASHIFTRT && first_code == ASHIFT
7064 && GET_MODE (varop) == shift_mode
7065 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7066 > first_count))
7067 {
7068 count -= first_count;
7069 if (count < 0)
7070 count = - count, code = ASHIFT;
7071 varop = XEXP (varop, 0);
7072 continue;
7073 }
7074
7075 /* There are some cases we can't do. If CODE is ASHIFTRT,
7076 we can only do this if FIRST_CODE is also ASHIFTRT.
7077
7078 We can't do the case when CODE is ROTATE and FIRST_CODE is
7079 ASHIFTRT.
7080
7081 If the mode of this shift is not the mode of the outer shift,
7082 we can't do this if either shift is ASHIFTRT or ROTATE.
7083
7084 Finally, we can't do any of these if the mode is too wide
7085 unless the codes are the same.
7086
7087 Handle the case where the shift codes are the same
7088 first. */
7089
7090 if (code == first_code)
7091 {
7092 if (GET_MODE (varop) != result_mode
7093 && (code == ASHIFTRT || code == ROTATE))
7094 break;
7095
7096 count += first_count;
7097 varop = XEXP (varop, 0);
7098 continue;
7099 }
7100
7101 if (code == ASHIFTRT
7102 || (code == ROTATE && first_code == ASHIFTRT)
7103 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
7104 || (GET_MODE (varop) != result_mode
7105 && (first_code == ASHIFTRT || first_code == ROTATE
7106 || code == ROTATE)))
7107 break;
7108
7109 /* To compute the mask to apply after the shift, shift the
7110 nonzero bits of the inner shift the same way the
7111 outer shift will. */
7112
7113 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
7114
7115 mask_rtx
7116 = simplify_binary_operation (code, result_mode, mask_rtx,
7117 GEN_INT (count));
7118
7119 /* Give up if we can't compute an outer operation to use. */
7120 if (mask_rtx == 0
7121 || GET_CODE (mask_rtx) != CONST_INT
7122 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7123 INTVAL (mask_rtx),
7124 result_mode, &complement_p))
7125 break;
7126
7127 /* If the shifts are in the same direction, we add the
7128 counts. Otherwise, we subtract them. */
7129 if ((code == ASHIFTRT || code == LSHIFTRT)
7130 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7131 count += first_count;
7132 else
7133 count -= first_count;
7134
7135 /* If COUNT is positive, the new shift is usually CODE,
7136 except for the two exceptions below, in which case it is
7137 FIRST_CODE. If the count is negative, FIRST_CODE should
7138 always be used. */
7139 if (count > 0
7140 && ((first_code == ROTATE && code == ASHIFT)
7141 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7142 code = first_code;
7143 else if (count < 0)
7144 code = first_code, count = - count;
7145
7146 varop = XEXP (varop, 0);
7147 continue;
7148 }
7149
7150 /* If we have (A << B << C) for any shift, we can convert this to
7151 (A << C << B). This wins if A is a constant. Only try this if
7152 B is not a constant. */
7153
7154 else if (GET_CODE (varop) == code
7155 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7156 && 0 != (new
7157 = simplify_binary_operation (code, mode,
7158 XEXP (varop, 0),
7159 GEN_INT (count))))
7160 {
7161 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7162 count = 0;
7163 continue;
7164 }
7165 break;
7166
7167 case NOT:
7168 /* Make this fit the case below. */
7169 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
7170 GEN_INT (GET_MODE_MASK (mode)));
7171 continue;
7172
7173 case IOR:
7174 case AND:
7175 case XOR:
7176 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7177 with C the size of VAROP - 1 and the shift is logical if
7178 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7179 we have an (le X 0) operation. If we have an arithmetic shift
7180 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7181 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7182
7183 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7184 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7185 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7186 && (code == LSHIFTRT || code == ASHIFTRT)
7187 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7188 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7189 {
7190 count = 0;
7191 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7192 const0_rtx);
7193
7194 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7195 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7196
7197 continue;
7198 }
7199
7200 /* If we have (shift (logical)), move the logical to the outside
7201 to allow it to possibly combine with another logical and the
7202 shift to combine with another shift. This also canonicalizes to
7203 what a ZERO_EXTRACT looks like. Also, some machines have
7204 (and (shift)) insns. */
7205
7206 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7207 && (new = simplify_binary_operation (code, result_mode,
7208 XEXP (varop, 1),
7209 GEN_INT (count))) != 0
7210 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7211 INTVAL (new), result_mode, &complement_p))
7212 {
7213 varop = XEXP (varop, 0);
7214 continue;
7215 }
7216
7217 /* If we can't do that, try to simplify the shift in each arm of the
7218 logical expression, make a new logical expression, and apply
7219 the inverse distributive law. */
7220 {
7221 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
7222 XEXP (varop, 0), count);
7223 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
7224 XEXP (varop, 1), count);
7225
7226 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
7227 varop = apply_distributive_law (varop);
7228
7229 count = 0;
7230 }
7231 break;
7232
7233 case EQ:
7234 /* Convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7235 says that the sign bit can be tested, FOO has mode MODE, C is
7236 GET_MODE_BITSIZE (MODE) - 1, and only the low-order bit of FOO
7237 may be nonzero. */
7238 if (code == LSHIFT
7239 && XEXP (varop, 1) == const0_rtx
7240 && GET_MODE (XEXP (varop, 0)) == result_mode
7241 && count == GET_MODE_BITSIZE (result_mode) - 1
7242 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7243 && ((STORE_FLAG_VALUE
7244 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
7245 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7246 && merge_outer_ops (&outer_op, &outer_const, XOR,
7247 (HOST_WIDE_INT) 1, result_mode,
7248 &complement_p))
7249 {
7250 varop = XEXP (varop, 0);
7251 count = 0;
7252 continue;
7253 }
7254 break;
7255
7256 case NEG:
7257 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7258 than the number of bits in the mode is equivalent to A. */
7259 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7260 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
7261 {
7262 varop = XEXP (varop, 0);
7263 count = 0;
7264 continue;
7265 }
7266
7267 /* NEG commutes with ASHIFT since it is multiplication. Move the
7268 NEG outside to allow shifts to combine. */
7269 if (code == ASHIFT
7270 && merge_outer_ops (&outer_op, &outer_const, NEG,
7271 (HOST_WIDE_INT) 0, result_mode,
7272 &complement_p))
7273 {
7274 varop = XEXP (varop, 0);
7275 continue;
7276 }
7277 break;
7278
7279 case PLUS:
7280 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7281 is one less than the number of bits in the mode is
7282 equivalent to (xor A 1). */
7283 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7284 && XEXP (varop, 1) == constm1_rtx
7285 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7286 && merge_outer_ops (&outer_op, &outer_const, XOR,
7287 (HOST_WIDE_INT) 1, result_mode,
7288 &complement_p))
7289 {
7290 count = 0;
7291 varop = XEXP (varop, 0);
7292 continue;
7293 }
7294
7295 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
7296 that might be nonzero in BAR are those being shifted out and those
7297 bits are known zero in FOO, we can replace the PLUS with FOO.
7298 Similarly in the other operand order. This code occurs when
7299 we are computing the size of a variable-size array. */
7300
7301 if ((code == ASHIFTRT || code == LSHIFTRT)
7302 && count < HOST_BITS_PER_WIDE_INT
7303 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
7304 && (nonzero_bits (XEXP (varop, 1), result_mode)
7305 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
7306 {
7307 varop = XEXP (varop, 0);
7308 continue;
7309 }
7310 else if ((code == ASHIFTRT || code == LSHIFTRT)
7311 && count < HOST_BITS_PER_WIDE_INT
7312 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7313 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7314 >> count)
7315 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7316 & nonzero_bits (XEXP (varop, 1),
7317 result_mode)))
7318 {
7319 varop = XEXP (varop, 1);
7320 continue;
7321 }
7322
7323 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7324 if (code == ASHIFT
7325 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7326 && (new = simplify_binary_operation (ASHIFT, result_mode,
7327 XEXP (varop, 1),
7328 GEN_INT (count))) != 0
7329 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7330 INTVAL (new), result_mode, &complement_p))
7331 {
7332 varop = XEXP (varop, 0);
7333 continue;
7334 }
7335 break;
7336
7337 case MINUS:
7338 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
7339 with C the size of VAROP - 1 and the shift is logical if
7340 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7341 we have a (gt X 0) operation. If the shift is arithmetic with
7342 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7343 we have a (neg (gt X 0)) operation. */
7344
7345 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7346 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7347 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7348 && (code == LSHIFTRT || code == ASHIFTRT)
7349 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7350 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7351 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7352 {
7353 count = 0;
7354 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7355 const0_rtx);
7356
7357 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7358 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7359
7360 continue;
7361 }
7362 break;
7363 }
7364
7365 break;
7366 }
7367
7368 /* We need to determine what mode to do the shift in. If the shift is
7369 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7370 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7371 The code we care about is that of the shift that will actually be done,
7372 not the shift that was originally requested. */
7373 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7374
7375 /* We have now finished analyzing the shift. The result should be
7376 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7377 OUTER_OP is non-NIL, it is an operation that needs to be applied
7378 to the result of the shift. OUTER_CONST is the relevant constant,
7379 but we must turn off all bits turned off in the shift.
7380
7381 If we were passed a value for X, see if we can use any pieces of
7382 it. If not, make new rtx. */
7383
7384 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7385 && GET_CODE (XEXP (x, 1)) == CONST_INT
7386 && INTVAL (XEXP (x, 1)) == count)
7387 const_rtx = XEXP (x, 1);
7388 else
7389 const_rtx = GEN_INT (count);
7390
7391 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7392 && GET_MODE (XEXP (x, 0)) == shift_mode
7393 && SUBREG_REG (XEXP (x, 0)) == varop)
7394 varop = XEXP (x, 0);
7395 else if (GET_MODE (varop) != shift_mode)
7396 varop = gen_lowpart_for_combine (shift_mode, varop);
7397
7398 /* If we can't make the SUBREG, try to return what we were given. */
7399 if (GET_CODE (varop) == CLOBBER)
7400 return x ? x : varop;
7401
7402 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
7403 if (new != 0)
7404 x = new;
7405 else
7406 {
7407 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
7408 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
7409
7410 SUBST (XEXP (x, 0), varop);
7411 SUBST (XEXP (x, 1), const_rtx);
7412 }
7413
7414 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
7415 turn off all the bits that the shift would have turned off. */
7416 if (orig_code == LSHIFTRT && result_mode != shift_mode)
7417 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
7418 GET_MODE_MASK (result_mode) >> orig_count);
7419
7420 /* Do the remainder of the processing in RESULT_MODE. */
7421 x = gen_lowpart_for_combine (result_mode, x);
7422
7423 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
7424 operation. */
7425 if (complement_p)
7426 x = gen_unary (NOT, result_mode, x);
7427
7428 if (outer_op != NIL)
7429 {
7430 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7431 outer_const &= GET_MODE_MASK (result_mode);
7432
7433 if (outer_op == AND)
7434 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
7435 else if (outer_op == SET)
7436 /* This means that we have determined that the result is
7437 equivalent to a constant. This should be rare. */
7438 x = GEN_INT (outer_const);
7439 else if (GET_RTX_CLASS (outer_op) == '1')
7440 x = gen_unary (outer_op, result_mode, x);
7441 else
7442 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
7443 }
7444
7445 return x;
7446 }
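/* Illustrative sketch, not part of the original combine.c: the
   nested-shift rule above, in plain host arithmetic for an 8-bit
   mode.  Two shifts in the same direction add their counts, and the
   result is masked with the inner shift's nonzero bits moved the way
   the outer shift moves them.  Guarded with #if 0 so it cannot affect
   the build.  */
#if 0
static unsigned int
example_merge_lshiftrt (x)
     unsigned int x;		/* computes (lshiftrt (lshiftrt x 2) 3) */
{
  unsigned int inner_nonzero = 0xff >> 2;	/* 0x3f */
  unsigned int mask = inner_nonzero >> 3;	/* 0x07 */

  return ((x & 0xff) >> (2 + 3)) & mask;
}
#endif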
7447 \f
7448 /* Like recog, but we receive the address of a pointer to a new pattern.
7449 We try to match the rtx that the pointer points to.
7450 If that fails, we may try to modify or replace the pattern,
7451 storing the replacement into the same pointer object.
7452
7453 Modifications include deletion or addition of CLOBBERs.
7454
7455 PNOTES is a pointer to a location where any REG_UNUSED notes added for
7456 the CLOBBERs are placed.
7457
7458 The value is the final insn code from the pattern ultimately matched,
7459 or -1. */
7460
7461 static int
7462 recog_for_combine (pnewpat, insn, pnotes)
7463 rtx *pnewpat;
7464 rtx insn;
7465 rtx *pnotes;
7466 {
7467 register rtx pat = *pnewpat;
7468 int insn_code_number;
7469 int num_clobbers_to_add = 0;
7470 int i;
7471 rtx notes = 0;
7472
7473 /* Is the result of combination a valid instruction? */
7474 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7475
7476 /* If it isn't, there is the possibility that we previously had an insn
7477 that clobbered some register as a side effect, but the combined
7478 insn doesn't need to do that. So try once more without the clobbers
7479 unless this represents an ASM insn. */
7480
7481 if (insn_code_number < 0 && ! check_asm_operands (pat)
7482 && GET_CODE (pat) == PARALLEL)
7483 {
7484 int pos;
7485
7486 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
7487 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
7488 {
7489 if (i != pos)
7490 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
7491 pos++;
7492 }
7493
7494 SUBST_INT (XVECLEN (pat, 0), pos);
7495
7496 if (pos == 1)
7497 pat = XVECEXP (pat, 0, 0);
7498
7499 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7500 }
7501
7502 /* If we had any clobbers to add, make a new pattern that contains
7503 them. Then check to make sure that all of them are dead. */
7504 if (num_clobbers_to_add)
7505 {
7506 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
7507 gen_rtvec (GET_CODE (pat) == PARALLEL
7508 ? XVECLEN (pat, 0) + num_clobbers_to_add
7509 : num_clobbers_to_add + 1));
7510
7511 if (GET_CODE (pat) == PARALLEL)
7512 for (i = 0; i < XVECLEN (pat, 0); i++)
7513 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
7514 else
7515 XVECEXP (newpat, 0, 0) = pat;
7516
7517 add_clobbers (newpat, insn_code_number);
7518
7519 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
7520 i < XVECLEN (newpat, 0); i++)
7521 {
7522 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
7523 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
7524 return -1;
7525 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
7526 XEXP (XVECEXP (newpat, 0, i), 0), notes);
7527 }
7528 pat = newpat;
7529 }
7530
7531 *pnewpat = pat;
7532 *pnotes = notes;
7533
7534 return insn_code_number;
7535 }
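/* Generic sketch, not part of the original combine.c, of the
   compaction idiom used above to delete CLOBBERs from a PARALLEL: a
   write index POS trails the read index I, and only kept elements are
   copied down.  Guarded with #if 0 so it cannot affect the build.  */
#if 0
static int
example_compact (v, n, drop)
     int *v;
     int n;
     int drop;			/* value to be deleted */
{
  int i, pos;

  for (pos = 0, i = 0; i < n; i++)
    if (v[i] != drop)
      {
	if (i != pos)
	  v[pos] = v[i];
	pos++;
      }

  return pos;			/* the new length */
}
#endif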
7536 \f
7537 /* Like gen_lowpart but for use by combine. In combine it is not possible
7538 to create any new pseudoregs. However, it is safe to create
7539 invalid memory addresses, because combine will try to recognize
7540 them and all they will do is make the combine attempt fail.
7541
7542 If for some reason this cannot do its job, an rtx
7543 (clobber (const_int 0)) is returned.
7544 An insn containing that will not be recognized. */
7545
7546 #undef gen_lowpart
7547
7548 static rtx
7549 gen_lowpart_for_combine (mode, x)
7550 enum machine_mode mode;
7551 register rtx x;
7552 {
7553 rtx result;
7554
7555 if (GET_MODE (x) == mode)
7556 return x;
7557
7558 /* We can only support MODE being wider than a word if X is a
7559 constant integer or has a mode the same size. */
7560
7561 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
7562 && ! ((GET_MODE (x) == VOIDmode
7563 && (GET_CODE (x) == CONST_INT
7564 || GET_CODE (x) == CONST_DOUBLE))
7565 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
7566 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7567
7568 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
7569 won't know what to do. So we will strip off the SUBREG here and
7570 process normally. */
7571 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
7572 {
7573 x = SUBREG_REG (x);
7574 if (GET_MODE (x) == mode)
7575 return x;
7576 }
7577
7578 result = gen_lowpart_common (mode, x);
7579 if (result)
7580 return result;
7581
7582 if (GET_CODE (x) == MEM)
7583 {
7584 register int offset = 0;
7585 rtx new;
7586
7587 /* Refuse to work on a volatile memory ref or one with a mode-dependent
7588 address. */
7589 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
7590 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7591
7592 /* If we want to refer to something bigger than the original memref,
7593 generate a perverse subreg instead. That will force a reload
7594 of the original memref X. */
7595 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
7596 return gen_rtx (SUBREG, mode, x, 0);
7597
7598 #if WORDS_BIG_ENDIAN
7599 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
7600 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
7601 #endif
7602 #if BYTES_BIG_ENDIAN
7603 /* Adjust the address so that the address-after-the-data
7604 is unchanged. */
7605 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
7606 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
7607 #endif
7608 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
7609 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
7610 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
7611 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
7612 return new;
7613 }
7614
7615 /* If X is a comparison operator, rewrite it in a new mode. This
7616 probably won't match, but may allow further simplifications. */
7617 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
7618 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
7619
7620 /* If we couldn't simplify X any other way, just enclose it in a
7621 SUBREG. Normally, this SUBREG won't match, but some patterns may
7622 include an explicit SUBREG or we may simplify it further in combine. */
7623 else
7624 {
7625 int word = 0;
7626
7627 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
7628 word = ((GET_MODE_SIZE (GET_MODE (x))
7629 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
7630 / UNITS_PER_WORD);
7631 return gen_rtx (SUBREG, mode, x, word);
7632 }
7633 }
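/* Worked example, not part of the original combine.c: on a
   hypothetical big-endian machine with 4-byte words, taking the
   1-byte lowpart of a 4-byte MEM above computes
   offset = MAX (4, 4) - MAX (1, 4) = 0 for WORDS_BIG_ENDIAN, then
   offset -= MIN (4, 1) - MIN (4, 4) = -3 for BYTES_BIG_ENDIAN,
   leaving offset == 3: the low-order byte is the last one, so the
   address-after-the-data is indeed unchanged.  */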
7634 \f
7635 /* Make an rtx expression. This is a subset of gen_rtx and only supports
7636 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
7637
7638 If the identical expression was previously in the insn (in the undobuf),
7639 it will be returned. Only if it is not found will a new expression
7640 be made. */
7641
7642 /*VARARGS2*/
7643 static rtx
7644 gen_rtx_combine (va_alist)
7645 va_dcl
7646 {
7647 va_list p;
7648 enum rtx_code code;
7649 enum machine_mode mode;
7650 int n_args;
7651 rtx args[3];
7652 int i, j;
7653 char *fmt;
7654 rtx rt;
7655
7656 va_start (p);
7657 code = va_arg (p, enum rtx_code);
7658 mode = va_arg (p, enum machine_mode);
7659 n_args = GET_RTX_LENGTH (code);
7660 fmt = GET_RTX_FORMAT (code);
7661
7662 if (n_args == 0 || n_args > 3)
7663 abort ();
7664
7665 /* Get each arg and verify that it is supposed to be an expression. */
7666 for (j = 0; j < n_args; j++)
7667 {
7668 if (*fmt++ != 'e')
7669 abort ();
7670
7671 args[j] = va_arg (p, rtx);
7672 }
7673
7674 /* See if this is in undobuf. Be sure we don't use objects that came
7675 from another insn; this could produce circular rtl structures. */
7676
7677 for (i = previous_num_undos; i < undobuf.num_undo; i++)
7678 if (!undobuf.undo[i].is_int
7679 && GET_CODE (undobuf.undo[i].old_contents.rtx) == code
7680 && GET_MODE (undobuf.undo[i].old_contents.rtx) == mode)
7681 {
7682 for (j = 0; j < n_args; j++)
7683 if (XEXP (undobuf.undo[i].old_contents.rtx, j) != args[j])
7684 break;
7685
7686 if (j == n_args)
7687 return undobuf.undo[i].old_contents.rtx;
7688 }
7689
7690 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7691 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7692 rt = rtx_alloc (code);
7693 PUT_MODE (rt, mode);
7694 XEXP (rt, 0) = args[0];
7695 if (n_args > 1)
7696 {
7697 XEXP (rt, 1) = args[1];
7698 if (n_args > 2)
7699 XEXP (rt, 2) = args[2];
7700 }
7701 return rt;
7702 }
7703
7704 /* These routines make binary and unary operations by first seeing if they
7705 fold; if not, a new expression is allocated. */
7706
7707 static rtx
7708 gen_binary (code, mode, op0, op1)
7709 enum rtx_code code;
7710 enum machine_mode mode;
7711 rtx op0, op1;
7712 {
7713 rtx result;
7714 rtx tem;
7715
7716 if (GET_RTX_CLASS (code) == 'c'
7717 && (GET_CODE (op0) == CONST_INT
7718 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
7719 tem = op0, op0 = op1, op1 = tem;
7720
7721 if (GET_RTX_CLASS (code) == '<')
7722 {
7723 enum machine_mode op_mode = GET_MODE (op0);
7724 if (op_mode == VOIDmode)
7725 op_mode = GET_MODE (op1);
7726 result = simplify_relational_operation (code, op_mode, op0, op1);
7727 }
7728 else
7729 result = simplify_binary_operation (code, mode, op0, op1);
7730
7731 if (result)
7732 return result;
7733
7734 /* Put complex operands first and constants second. */
7735 if (GET_RTX_CLASS (code) == 'c'
7736 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
7737 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
7738 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
7739 || (GET_CODE (op0) == SUBREG
7740 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
7741 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
7742 return gen_rtx_combine (code, mode, op1, op0);
7743
7744 return gen_rtx_combine (code, mode, op0, op1);
7745 }
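/* For example (illustrative, not from the original source):
   gen_binary (PLUS, SImode, const1_rtx, some_reg) first swaps its
   operands, so the constant ends up second and the result is
   (plus:SI (reg) (const_int 1)), the canonical order the patterns
   expect.  SOME_REG here is a hypothetical REG rtx.  */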
7746
7747 static rtx
7748 gen_unary (code, mode, op0)
7749 enum rtx_code code;
7750 enum machine_mode mode;
7751 rtx op0;
7752 {
7753 rtx result = simplify_unary_operation (code, mode, op0, mode);
7754
7755 if (result)
7756 return result;
7757
7758 return gen_rtx_combine (code, mode, op0);
7759 }
7760 \f
7761 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
7762 comparison code that will be tested.
7763
7764 The result is a possibly different comparison code to use. *POP0 and
7765 *POP1 may be updated.
7766
7767 It is possible that we might detect that a comparison is either always
7768 true or always false. However, we do not perform general constant
7769 folding in combine, so this knowledge isn't useful. Such tautologies
7770 should have been detected earlier. Hence we ignore all such cases. */
7771
7772 static enum rtx_code
7773 simplify_comparison (code, pop0, pop1)
7774 enum rtx_code code;
7775 rtx *pop0;
7776 rtx *pop1;
7777 {
7778 rtx op0 = *pop0;
7779 rtx op1 = *pop1;
7780 rtx tem, tem1;
7781 int i;
7782 enum machine_mode mode, tmode;
7783
7784 /* Try a few ways of applying the same transformation to both operands. */
7785 while (1)
7786 {
7787 /* If both operands are the same constant shift, see if we can ignore the
7788 shift. We can if the shift is a rotate or if the bits shifted out of
7789 this shift are known to be zero for both inputs and if the type of
7790 comparison is compatible with the shift. */
7791 if (GET_CODE (op0) == GET_CODE (op1)
7792 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7793 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
7794 || ((GET_CODE (op0) == LSHIFTRT
7795 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7796 && (code != GT && code != LT && code != GE && code != LE))
7797 || (GET_CODE (op0) == ASHIFTRT
7798 && (code != GTU && code != LTU
7799 && code != GEU && code != LEU)))
7800 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7801 && INTVAL (XEXP (op0, 1)) >= 0
7802 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7803 && XEXP (op0, 1) == XEXP (op1, 1))
7804 {
7805 enum machine_mode mode = GET_MODE (op0);
7806 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7807 int shift_count = INTVAL (XEXP (op0, 1));
7808
7809 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
7810 mask &= (mask >> shift_count) << shift_count;
7811 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7812 mask = (mask & (mask << shift_count)) >> shift_count;
7813
7814 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
7815 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
7816 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
7817 else
7818 break;
7819 }
7820
7821 /* If both operands are AND's of a paradoxical SUBREG by constant, the
7822 SUBREGs are of the same mode, and, in both cases, the AND would
7823 be redundant if the comparison was done in the narrower mode,
7824 do the comparison in the narrower mode (e.g., we are AND'ing with 1
7825 and the operand's possibly nonzero bits are 0xffffff01; in that case
7826 if we only care about QImode, we don't need the AND). This case
7827 occurs if the output mode of an scc insn is not SImode and
7828 STORE_FLAG_VALUE == 1 (e.g., the 386). */
7829
7830 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
7831 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7832 && GET_CODE (XEXP (op1, 1)) == CONST_INT
7833 && GET_CODE (XEXP (op0, 0)) == SUBREG
7834 && GET_CODE (XEXP (op1, 0)) == SUBREG
7835 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
7836 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
7837 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
7838 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7839 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7840 <= HOST_BITS_PER_WIDE_INT)
7841 && (nonzero_bits (SUBREG_REG (XEXP (op0, 0)),
7842 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7843 & ~ INTVAL (XEXP (op0, 1))) == 0
7844 && (nonzero_bits (SUBREG_REG (XEXP (op1, 0)),
7845 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7846 & ~ INTVAL (XEXP (op1, 1))) == 0)
7847 {
7848 op0 = SUBREG_REG (XEXP (op0, 0));
7849 op1 = SUBREG_REG (XEXP (op1, 0));
7850
7851 /* The resulting comparison is always unsigned since we masked off
7852 the original sign bit. */
7853 code = unsigned_condition (code);
7854 }
7855 else
7856 break;
7857 }
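/* Worked example, not part of the original combine.c: for 8-bit
   operands each shifted by (lshiftrt ... 3), MASK starts as 0xff and
   becomes (0xff >> 3) << 3 == 0xf8.  The shifts may be ignored only
   if neither operand has possibly nonzero bits outside 0xf8, i.e.
   the three bits shifted out are known zero in both inputs.  */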
7858
7859 /* If the first operand is a constant, swap the operands and adjust the
7860 comparison code appropriately. */
7861 if (CONSTANT_P (op0))
7862 {
7863 tem = op0, op0 = op1, op1 = tem;
7864 code = swap_condition (code);
7865 }
7866
7867 /* We now enter a loop during which we will try to simplify the comparison.
7868 For the most part, we only are concerned with comparisons with zero,
7869 but some things may really be comparisons with zero but not start
7870 out looking that way. */
7871
7872 while (GET_CODE (op1) == CONST_INT)
7873 {
7874 enum machine_mode mode = GET_MODE (op0);
7875 int mode_width = GET_MODE_BITSIZE (mode);
7876 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7877 int equality_comparison_p;
7878 int sign_bit_comparison_p;
7879 int unsigned_comparison_p;
7880 HOST_WIDE_INT const_op;
7881
7882 /* We only want to handle integral modes. This catches VOIDmode,
7883 CCmode, and the floating-point modes. An exception is that we
7884 can handle VOIDmode if OP0 is a COMPARE or a comparison
7885 operation. */
7886
7887 if (GET_MODE_CLASS (mode) != MODE_INT
7888 && ! (mode == VOIDmode
7889 && (GET_CODE (op0) == COMPARE
7890 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
7891 break;
7892
7893 /* Get the constant we are comparing against and turn off all bits
7894 not on in our mode. */
7895 const_op = INTVAL (op1);
7896 if (mode_width <= HOST_BITS_PER_WIDE_INT)
7897 const_op &= mask;
7898
7899 /* If we are comparing against a constant power of two and the value
7900 being compared can only have that single bit nonzero (e.g., it was
7901 `and'ed with that bit), we can replace this with a comparison
7902 with zero. */
7903 if (const_op
7904 && (code == EQ || code == NE || code == GE || code == GEU
7905 || code == LT || code == LTU)
7906 && mode_width <= HOST_BITS_PER_WIDE_INT
7907 && exact_log2 (const_op) >= 0
7908 && nonzero_bits (op0, mode) == const_op)
7909 {
7910 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
7911 op1 = const0_rtx, const_op = 0;
7912 }
7913
7914 /* Similarly, if we are comparing a value known to be either -1 or
7915 0 with -1, change it to the opposite comparison against zero. */
7916
7917 if (const_op == -1
7918 && (code == EQ || code == NE || code == GT || code == LE
7919 || code == GEU || code == LTU)
7920 && num_sign_bit_copies (op0, mode) == mode_width)
7921 {
7922 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
7923 op1 = const0_rtx, const_op = 0;
7924 }
7925
7926 /* Do some canonicalizations based on the comparison code. We prefer
7927 comparisons against zero and then prefer equality comparisons.
7928 If we can reduce the size of a constant, we will do that too. */
7929
7930 switch (code)
7931 {
7932 case LT:
7933 /* < C is equivalent to <= (C - 1) */
7934 if (const_op > 0)
7935 {
7936 const_op -= 1;
7937 op1 = GEN_INT (const_op);
7938 code = LE;
7939 /* ... fall through to LE case below. */
7940 }
7941 else
7942 break;
7943
7944 case LE:
7945 /* <= C is equivalent to < (C + 1); we do this for C < 0. */
7946 if (const_op < 0)
7947 {
7948 const_op += 1;
7949 op1 = GEN_INT (const_op);
7950 code = LT;
7951 }
7952
7953 /* If we are doing a <= 0 comparison on a value known to have
7954 a zero sign bit, we can replace this with == 0. */
7955 else if (const_op == 0
7956 && mode_width <= HOST_BITS_PER_WIDE_INT
7957 && (nonzero_bits (op0, mode)
7958 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7959 code = EQ;
7960 break;
7961
7962 case GE:
7963 /* >= C is equivalent to > (C - 1). */
7964 if (const_op > 0)
7965 {
7966 const_op -= 1;
7967 op1 = GEN_INT (const_op);
7968 code = GT;
7969 /* ... fall through to GT below. */
7970 }
7971 else
7972 break;
7973
7974 case GT:
7975 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
7976 if (const_op < 0)
7977 {
7978 const_op += 1;
7979 op1 = GEN_INT (const_op);
7980 code = GE;
7981 }
7982
7983 /* If we are doing a > 0 comparison on a value known to have
7984 a zero sign bit, we can replace this with != 0. */
7985 else if (const_op == 0
7986 && mode_width <= HOST_BITS_PER_WIDE_INT
7987 && (nonzero_bits (op0, mode)
7988 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7989 code = NE;
7990 break;
7991
7992 case LTU:
7993 /* < C is equivalent to <= (C - 1). */
7994 if (const_op > 0)
7995 {
7996 const_op -= 1;
7997 op1 = GEN_INT (const_op);
7998 code = LEU;
7999 /* ... fall through ... */
8000 }
8001
8002 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
8003 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8004 {
8005 const_op = 0, op1 = const0_rtx;
8006 code = GE;
8007 break;
8008 }
8009 else
8010 break;
8011
8012 case LEU:
8013 /* unsigned <= 0 is equivalent to == 0 */
8014 if (const_op == 0)
8015 code = EQ;
8016
8017 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
8018 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8019 {
8020 const_op = 0, op1 = const0_rtx;
8021 code = GE;
8022 }
8023 break;
8024
8025 case GEU:
8026 /* >= C is equivalent to > (C - 1). */
8027 if (const_op > 1)
8028 {
8029 const_op -= 1;
8030 op1 = GEN_INT (const_op);
8031 code = GTU;
8032 /* ... fall through ... */
8033 }
8034
8035 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
8036 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8037 {
8038 const_op = 0, op1 = const0_rtx;
8039 code = LT;
8040 }
8041 else
8042 break;
8043
8044 case GTU:
8045 /* unsigned > 0 is equivalent to != 0 */
8046 if (const_op == 0)
8047 code = NE;
8048
8049 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
8050 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8051 {
8052 const_op = 0, op1 = const0_rtx;
8053 code = LT;
8054 }
8055 break;
8056 }
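/* Editorial illustration of the canonicalizations above, assuming a
32-bit mode: (lt x 4) becomes (le x 3); (ge x 4) becomes (gt x 3);
(ltu x 0x80000000) becomes (ge x 0), i.e. a sign-bit test; and
(leu x 0) becomes (eq x 0). */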
8057
8058 /* Compute some predicates to simplify code below. */
8059
8060 equality_comparison_p = (code == EQ || code == NE);
8061 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
8062 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
8063 || code == GEU);
8064
8065 /* Now try cases based on the opcode of OP0. If none of the cases
8066 does a "continue", we exit this loop immediately after the
8067 switch. */
8068
8069 switch (GET_CODE (op0))
8070 {
8071 case ZERO_EXTRACT:
8072 /* If we are extracting a single bit from a variable position in
8073 a constant that has only a single bit set and are comparing it
8074 with zero, we can convert this into an equality comparison
8075 between the position and the location of the single bit. We can't
8076 do this when bits are big-endian unless we have an extzv, since we
8077 then can't know what mode to use for the endianness adjustment. */
8078
8079 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
8080 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
8081 && XEXP (op0, 1) == const1_rtx
8082 && equality_comparison_p && const_op == 0
8083 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
8084 {
8085 #if BITS_BIG_ENDIAN
8086 i = (GET_MODE_BITSIZE
8087 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
8088 #endif
8089
8090 op0 = XEXP (op0, 2);
8091 op1 = GEN_INT (i);
8092 const_op = i;
8093
8094 /* Result is nonzero iff shift count is equal to I. */
8095 code = reverse_condition (code);
8096 continue;
8097 }
8098 #endif
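/* Worked example (editorial sketch): comparing (zero_extract
(const_int 0x10) (const_int 1) POS) against zero extracts bit POS of
0x10, whose only set bit is bit 4; so (eq ... 0) becomes (ne POS 4)
and (ne ... 0) becomes (eq POS 4), before any big-endian position
adjustment. */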
8099
8100 /* ... fall through ... */
8101
8102 case SIGN_EXTRACT:
8103 tem = expand_compound_operation (op0);
8104 if (tem != op0)
8105 {
8106 op0 = tem;
8107 continue;
8108 }
8109 break;
8110
8111 case NOT:
8112 /* If testing for equality, we can take the NOT of the constant. */
8113 if (equality_comparison_p
8114 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
8115 {
8116 op0 = XEXP (op0, 0);
8117 op1 = tem;
8118 continue;
8119 }
8120
8121 /* If just looking at the sign bit, reverse the sense of the
8122 comparison. */
8123 if (sign_bit_comparison_p)
8124 {
8125 op0 = XEXP (op0, 0);
8126 code = (code == GE ? LT : GE);
8127 continue;
8128 }
8129 break;
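/* E.g. (editorial note): (eq (not x) 12) becomes (eq x -13), since
~x == 12 exactly when x == ~12 == -13; and the sign-bit test
(lt (not x) 0) becomes (ge x 0), because NOT flips the sign bit. */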
8130
8131 case NEG:
8132 /* If testing for equality, we can take the NEG of the constant. */
8133 if (equality_comparison_p
8134 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
8135 {
8136 op0 = XEXP (op0, 0);
8137 op1 = tem;
8138 continue;
8139 }
8140
8141 /* The remaining cases only apply to comparisons with zero. */
8142 if (const_op != 0)
8143 break;
8144
8145 /* When X is ABS or is known positive,
8146 (neg X) is < 0 if and only if X != 0. */
8147
8148 if (sign_bit_comparison_p
8149 && (GET_CODE (XEXP (op0, 0)) == ABS
8150 || (mode_width <= HOST_BITS_PER_WIDE_INT
8151 && (nonzero_bits (XEXP (op0, 0), mode)
8152 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
8153 {
8154 op0 = XEXP (op0, 0);
8155 code = (code == LT ? NE : EQ);
8156 continue;
8157 }
8158
8159 /* If we have NEG of something whose two high-order bits are the
8160 same, we know that "(-a) < 0" is equivalent to "a > 0". */
8161 if (num_sign_bit_copies (op0, mode) >= 2)
8162 {
8163 op0 = XEXP (op0, 0);
8164 code = swap_condition (code);
8165 continue;
8166 }
8167 break;
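/* E.g. (editorial note): when x is known non-negative,
(lt (neg x) 0) becomes (ne x 0); and when the two high-order bits of
the operand agree, (lt (neg a) 0) becomes simply (gt a 0). */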
8168
8169 case ROTATE:
8170 /* If we are testing equality and our count is a constant, we
8171 can perform the inverse operation on our RHS. */
8172 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8173 && (tem = simplify_binary_operation (ROTATERT, mode,
8174 op1, XEXP (op0, 1))) != 0)
8175 {
8176 op0 = XEXP (op0, 0);
8177 op1 = tem;
8178 continue;
8179 }
8180
8181 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8182 a particular bit. Convert it to an AND of a constant of that
8183 bit. This will be converted into a ZERO_EXTRACT. */
8184 if (const_op == 0 && sign_bit_comparison_p
8185 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8186 && mode_width <= HOST_BITS_PER_WIDE_INT)
8187 {
8188 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8189 ((HOST_WIDE_INT) 1
8190 << (mode_width - 1
8191 - INTVAL (XEXP (op0, 1)))));
8192 code = (code == LT ? NE : EQ);
8193 continue;
8194 }
8195
8196 /* ... fall through ... */
8197
8198 case ABS:
8199 /* ABS is ignorable inside an equality comparison with zero. */
8200 if (const_op == 0 && equality_comparison_p)
8201 {
8202 op0 = XEXP (op0, 0);
8203 continue;
8204 }
8205 break;
8206
8207
8208 case SIGN_EXTEND:
8209 /* Can simplify (compare (zero/sign_extend FOO) CONST)
8210 to (compare FOO CONST) if CONST fits in FOO's mode and we
8211 are either testing inequality or have an unsigned comparison
8212 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
8213 if (! unsigned_comparison_p
8214 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8215 <= HOST_BITS_PER_WIDE_INT)
8216 && ((unsigned HOST_WIDE_INT) const_op
8217 < (((HOST_WIDE_INT) 1
8218 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
8219 {
8220 op0 = XEXP (op0, 0);
8221 continue;
8222 }
8223 break;
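/* E.g. (editorial note): (lt (sign_extend:SI x:QI) 100) can be done
as (lt x 100) in QImode, since 100 fits in QImode's signed range. */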
8224
8225 case SUBREG:
8226 /* Check for the case where we are comparing A - C1 with C2,
8227 both constants are smaller than 1/2 the maximum positive
8228 value in MODE, and the comparison is equality or unsigned.
8229 In that case, if A is either zero-extended to MODE or has
8230 sufficient sign bits so that the high-order bit in MODE
8231 is a copy of the sign in the inner mode, we can prove that it is
8232 safe to do the operation in the wider mode. This simplifies
8233 many range checks. */
8234
8235 if (mode_width <= HOST_BITS_PER_WIDE_INT
8236 && subreg_lowpart_p (op0)
8237 && GET_CODE (SUBREG_REG (op0)) == PLUS
8238 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
8239 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
8240 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
8241 < GET_MODE_MASK (mode) / 2)
8242 && (unsigned) const_op < GET_MODE_MASK (mode) / 2
8243 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
8244 GET_MODE (SUBREG_REG (op0)))
8245 & ~ GET_MODE_MASK (mode))
8246 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
8247 GET_MODE (SUBREG_REG (op0)))
8248 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8249 - GET_MODE_BITSIZE (mode)))))
8250 {
8251 op0 = SUBREG_REG (op0);
8252 continue;
8253 }
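/* Editorial illustration: this lets a range check such as
"(unsigned) (x - 10) < 20", i.e. 10 <= x < 30, that was narrowed to
MODE be performed directly on the wider PLUS, provided x is known to
be zero- or sign-extended as described above. */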
8254
8255 /* If the inner mode is narrower and we are extracting the low part,
8256 we can treat the SUBREG as if it were a ZERO_EXTEND. */
8257 if (subreg_lowpart_p (op0)
8258 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
8259 /* Fall through */ ;
8260 else
8261 break;
8262
8263 /* ... fall through ... */
8264
8265 case ZERO_EXTEND:
8266 if ((unsigned_comparison_p || equality_comparison_p)
8267 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8268 <= HOST_BITS_PER_WIDE_INT)
8269 && ((unsigned HOST_WIDE_INT) const_op
8270 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
8271 {
8272 op0 = XEXP (op0, 0);
8273 continue;
8274 }
8275 break;
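/* Similarly (editorial note): (ltu (zero_extend:SI x:QI) 200)
becomes (ltu x 200) in QImode, since 200 < GET_MODE_MASK (QImode). */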
8276
8277 case PLUS:
8278 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
8279 this for equality comparisons due to pathological cases involving
8280 overflows. */
8281 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8282 && (tem = simplify_binary_operation (MINUS, mode, op1,
8283 XEXP (op0, 1))) != 0)
8284 {
8285 op0 = XEXP (op0, 0);
8286 op1 = tem;
8287 continue;
8288 }
8289
8290 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
8291 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
8292 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
8293 {
8294 op0 = XEXP (XEXP (op0, 0), 0);
8295 code = (code == LT ? EQ : NE);
8296 continue;
8297 }
8298 break;
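/* E.g. (editorial note): (eq (plus x 5) 7) becomes (eq x 2). This is
valid only for equality; (lt (plus x 5) 7) and (lt x 2) can differ
when x + 5 overflows. */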
8299
8300 case MINUS:
8301 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
8302 of bits in X minus 1, is one iff X > 0. */
8303 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8304 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8305 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
8306 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8307 {
8308 op0 = XEXP (op0, 1);
8309 code = (code == GE ? LE : GT);
8310 continue;
8311 }
8312 break;
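/* Worked example (editorial sketch, 32-bit): for
(minus (ashiftrt x 31) x), x = 5 gives 0 - 5 = -5 (sign bit set, and
5 > 0); x = -5 gives -1 + 5 = 4 (clear, and -5 <= 0); x = 0 gives 0
(clear). Hence GE maps to LE and LT maps to GT on x itself. */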
8313
8314 case XOR:
8315 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
8316 if C is zero or B is a constant. */
8317 if (equality_comparison_p
8318 && 0 != (tem = simplify_binary_operation (XOR, mode,
8319 XEXP (op0, 1), op1)))
8320 {
8321 op0 = XEXP (op0, 0);
8322 op1 = tem;
8323 continue;
8324 }
8325 break;
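/* E.g. (editorial note): (eq (xor x 5) 9) becomes (eq x 12), since
x ^ 5 == 9 exactly when x == 5 ^ 9 == 12. */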
8326
8327 case EQ: case NE:
8328 case LT: case LTU: case LE: case LEU:
8329 case GT: case GTU: case GE: case GEU:
8330 /* We can't do anything if OP0 is a condition code value, rather
8331 than an actual data value. */
8332 if (const_op != 0
8333 #ifdef HAVE_cc0
8334 || XEXP (op0, 0) == cc0_rtx
8335 #endif
8336 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
8337 break;
8338
8339 /* Get the two operands being compared. */
8340 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
8341 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
8342 else
8343 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
8344
8345 /* Check for the cases where we simply want the result of the
8346 earlier test or the opposite of that result. */
8347 if (code == NE
8348 || (code == EQ && reversible_comparison_p (op0))
8349 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8350 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8351 && (STORE_FLAG_VALUE
8352 & (((HOST_WIDE_INT) 1
8353 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
8354 && (code == LT
8355 || (code == GE && reversible_comparison_p (op0)))))
8356 {
8357 code = (code == LT || code == NE
8358 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
8359 op0 = tem, op1 = tem1;
8360 continue;
8361 }
8362 break;
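/* E.g. (editorial note): (ne (eq A B) 0) reproduces the earlier
test, so it becomes (eq A B); (eq (eq A B) 0), when the comparison
is reversible, becomes (ne A B). */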
8363
8364 case IOR:
8365 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
8366 iff X <= 0. */
8367 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
8368 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
8369 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8370 {
8371 op0 = XEXP (op0, 1);
8372 code = (code == GE ? GT : LE);
8373 continue;
8374 }
8375 break;
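/* Worked example (editorial sketch, 32-bit): for
(ior (plus x -1) x), x = 0 gives -1 | 0 = -1 (sign bit set, and
0 <= 0); x = 3 gives 2 | 3 = 3 (clear, and 3 > 0); x = -2 gives
-3 | -2 = -1 (set, and -2 <= 0). Hence GE becomes GT and LT becomes
LE on x itself. */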
8376
8377 case AND:
8378 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
8379 will be converted to a ZERO_EXTRACT later. */
8380 if (const_op == 0 && equality_comparison_p
8381 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
8382 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
8383 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
8384 {
8385 op0 = simplify_and_const_int
8386 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
8387 XEXP (op0, 1),
8388 XEXP (XEXP (op0, 0), 1)),
8389 (HOST_WIDE_INT) 1);
8390 continue;
8391 }
8392
8393 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
8394 zero and X is a comparison and C1 and C2 describe only bits set
8395 in STORE_FLAG_VALUE, we can compare with X. */
8396 if (const_op == 0 && equality_comparison_p
8397 && mode_width <= HOST_BITS_PER_WIDE_INT
8398 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8399 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
8400 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8401 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
8402 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8403 {
8404 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8405 << INTVAL (XEXP (XEXP (op0, 0), 1)));
8406 if ((~ STORE_FLAG_VALUE & mask) == 0
8407 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
8408 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
8409 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
8410 {
8411 op0 = XEXP (XEXP (op0, 0), 0);
8412 continue;
8413 }
8414 }
8415
8416 /* If we are doing an equality comparison of an AND of a bit equal
8417 to the sign bit, replace this with a LT or GE comparison of
8418 the underlying value. */
8419 if (equality_comparison_p
8420 && const_op == 0
8421 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8422 && mode_width <= HOST_BITS_PER_WIDE_INT
8423 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8424 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
8425 {
8426 op0 = XEXP (op0, 0);
8427 code = (code == EQ ? GE : LT);
8428 continue;
8429 }
8430
8431 /* If this AND operation is really a ZERO_EXTEND from a narrower
8432 mode, the constant fits within that mode, and this is either an
8433 equality or unsigned comparison, try to do this comparison in
8434 the narrower mode. */
8435 if ((equality_comparison_p || unsigned_comparison_p)
8436 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8437 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
8438 & GET_MODE_MASK (mode))
8439 + 1)) >= 0
8440 && const_op >> i == 0
8441 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
8442 {
8443 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
8444 continue;
8445 }
8446 break;
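/* E.g. (editorial note): (and x 0xff) acts as a zero-extension from
QImode, so an equality or unsigned comparison of it against a
constant below 0x100 can instead be done on the low byte of x in
QImode. */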
8447
8448 case ASHIFT:
8449 case LSHIFT:
8450 /* If we have (compare (xshift FOO N) (const_int C)) and
8451 the high order N bits of FOO (N+1 if an inequality comparison)
8452 are known to be zero, we can do this by comparing FOO with C
8453 shifted right N bits so long as the low-order N bits of C are
8454 zero. */
8455 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8456 && INTVAL (XEXP (op0, 1)) >= 0
8457 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
8458 < HOST_BITS_PER_WIDE_INT)
8459 && ((const_op
8460 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
8461 && mode_width <= HOST_BITS_PER_WIDE_INT
8462 && (nonzero_bits (XEXP (op0, 0), mode)
8463 & ~ (mask >> (INTVAL (XEXP (op0, 1))
8464 + ! equality_comparison_p))) == 0)
8465 {
8466 const_op >>= INTVAL (XEXP (op0, 1));
8467 op1 = GEN_INT (const_op);
8468 op0 = XEXP (op0, 0);
8469 continue;
8470 }
8471
8472 /* If we are doing a sign bit comparison, it means we are testing
8473 a particular bit. Convert it to the appropriate AND. */
8474 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8475 && mode_width <= HOST_BITS_PER_WIDE_INT)
8476 {
8477 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8478 ((HOST_WIDE_INT) 1
8479 << (mode_width - 1
8480 - INTVAL (XEXP (op0, 1)))));
8481 code = (code == LT ? NE : EQ);
8482 continue;
8483 }
8484
8485 /* If this is an equality comparison with zero and we are shifting
8486 the low bit to the sign bit, we can convert this to an AND of the
8487 low-order bit. */
8488 if (const_op == 0 && equality_comparison_p
8489 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8490 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8491 {
8492 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8493 (HOST_WIDE_INT) 1);
8494 continue;
8495 }
8496 break;
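/* E.g. (editorial note): if the high two bits of x are known zero,
(eq (ashift x 2) 12) becomes (eq x 3), the low two bits of 12 being
zero; and (lt (ashift x N) 0) tests bit (mode_width - 1 - N) of x,
hence the AND built above. */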
8497
8498 case ASHIFTRT:
8499 /* If this is an equality comparison with zero, we can do this
8500 as a logical shift, which might be much simpler. */
8501 if (equality_comparison_p && const_op == 0
8502 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
8503 {
8504 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
8505 XEXP (op0, 0),
8506 INTVAL (XEXP (op0, 1)));
8507 continue;
8508 }
8509
8510 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
8511 do the comparison in a narrower mode. */
8512 if (! unsigned_comparison_p
8513 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8514 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8515 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
8516 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
8517 MODE_INT, 1)) != BLKmode
8518 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
8519 || ((unsigned HOST_WIDE_INT) - const_op
8520 <= GET_MODE_MASK (tmode))))
8521 {
8522 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
8523 continue;
8524 }
8525
8526 /* ... fall through ... */
8527 case LSHIFTRT:
8528 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
8529 the low order N bits of FOO are known to be zero, we can do this
8530 by comparing FOO with C shifted left N bits so long as no
8531 overflow occurs. */
8532 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8533 && INTVAL (XEXP (op0, 1)) >= 0
8534 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8535 && mode_width <= HOST_BITS_PER_WIDE_INT
8536 && (nonzero_bits (XEXP (op0, 0), mode)
8537 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
8538 && (const_op == 0
8539 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
8540 < mode_width)))
8541 {
8542 const_op <<= INTVAL (XEXP (op0, 1));
8543 op1 = GEN_INT (const_op);
8544 op0 = XEXP (op0, 0);
8545 continue;
8546 }
8547
8548 /* If we are using this shift to extract just the sign bit, we
8549 can replace this with an LT or GE comparison. */
8550 if (const_op == 0
8551 && (equality_comparison_p || sign_bit_comparison_p)
8552 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8553 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8554 {
8555 op0 = XEXP (op0, 0);
8556 code = (code == NE || code == GT ? LT : GE);
8557 continue;
8558 }
8559 break;
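/* E.g. (editorial note): when the low two bits of x are known zero,
(eq (lshiftrt x 2) 3) becomes (eq x 12); and shifting by
mode_width - 1 isolates the sign bit, so comparing the result with
zero becomes an LT or GE test of x itself. */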
8560 }
8561
8562 break;
8563 }
8564
8565 /* Now make any compound operations involved in this comparison. Then,
8566 check for an outermost SUBREG on OP0 that isn't doing anything or is
8567 paradoxical. The latter case can only occur when it is known that the
8568 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
8569 We can never remove a SUBREG for a non-equality comparison because the
8570 sign bit is in a different place in the underlying object. */
8571
8572 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
8573 op1 = make_compound_operation (op1, SET);
8574
8575 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8576 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8577 && (code == NE || code == EQ)
8578 && ((GET_MODE_SIZE (GET_MODE (op0))
8579 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
8580 {
8581 op0 = SUBREG_REG (op0);
8582 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
8583 }
8584
8585 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8586 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8587 && (code == NE || code == EQ)
8588 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8589 <= HOST_BITS_PER_WIDE_INT)
8590 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
8591 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
8592 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
8593 op1),
8594 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
8595 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
8596 op0 = SUBREG_REG (op0), op1 = tem;
8597
8598 /* We now do the opposite procedure: Some machines don't have compare
8599 insns in all modes. If OP0's mode is an integer mode smaller than a
8600 word and we can't do a compare in that mode, see if there is a larger
8601 mode for which we can do the compare. There are a number of cases in
8602 which we can use the wider mode. */
8603
8604 mode = GET_MODE (op0);
8605 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
8606 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
8607 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
8608 for (tmode = GET_MODE_WIDER_MODE (mode);
8609 (tmode != VOIDmode
8610 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
8611 tmode = GET_MODE_WIDER_MODE (tmode))
8612 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
8613 {
8614 /* If the only nonzero bits in OP0 and OP1 are those in the
8615 narrower mode and this is an equality or unsigned comparison,
8616 we can use the wider mode. Similarly for sign-extended
8617 values and equality or signed comparisons. */
8618 if (((code == EQ || code == NE
8619 || code == GEU || code == GTU || code == LEU || code == LTU)
8620 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
8621 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
8622 || ((code == EQ || code == NE
8623 || code == GE || code == GT || code == LE || code == LT)
8624 && (num_sign_bit_copies (op0, tmode)
8625 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
8626 && (num_sign_bit_copies (op1, tmode)
8627 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
8628 {
8629 op0 = gen_lowpart_for_combine (tmode, op0);
8630 op1 = gen_lowpart_for_combine (tmode, op1);
8631 break;
8632 }
8633
8634 /* If this is a test for negative, we can make an explicit
8635 test of the sign bit. */
8636
8637 if (op1 == const0_rtx && (code == LT || code == GE)
8638 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8639 {
8640 op0 = gen_binary (AND, tmode,
8641 gen_lowpart_for_combine (tmode, op0),
8642 GEN_INT ((HOST_WIDE_INT) 1
8643 << (GET_MODE_BITSIZE (mode) - 1)));
8644 code = (code == LT) ? NE : EQ;
8645 break;
8646 }
8647 }
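/* Editorial illustration: on a machine with no QImode compare
pattern, two QImode values whose upper bits are known to be zero (or
to be sign copies) in the first wider mode that has a pattern, say
SImode, can be compared there instead; failing that, a QImode sign
test (lt x 0) can be done as (ne (and:SI x 0x80) 0). */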
8648
8649 *pop0 = op0;
8650 *pop1 = op1;
8651
8652 return code;
8653 }
8654 \f
8655 /* Return 1 if we know that X, a comparison operation, is not operating
8656 on a floating-point value or is EQ or NE, meaning that we can safely
8657 reverse it. */
8658
8659 static int
8660 reversible_comparison_p (x)
8661 rtx x;
8662 {
8663 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
8664 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
8665 return 1;
8666
8667 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
8668 {
8669 case MODE_INT:
8670 return 1;
8671
8672 case MODE_CC:
8673 x = get_last_value (XEXP (x, 0));
8674 return (x && GET_CODE (x) == COMPARE
8675 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
8676 }
8677
8678 return 0;
8679 }
8680 \f
8681 /* Utility function for following routine. Called when X is part of a value
8682 being stored into reg_last_set_value. Sets reg_last_set_table_tick
8683 for each register mentioned. Similar to mention_regs in cse.c */
8684
8685 static void
8686 update_table_tick (x)
8687 rtx x;
8688 {
8689 register enum rtx_code code = GET_CODE (x);
8690 register char *fmt = GET_RTX_FORMAT (code);
8691 register int i;
8692
8693 if (code == REG)
8694 {
8695 int regno = REGNO (x);
8696 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8697 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8698
8699 for (i = regno; i < endregno; i++)
8700 reg_last_set_table_tick[i] = label_tick;
8701
8702 return;
8703 }
8704
8705 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8706 /* Note that we can't have an "E" in values stored; see
8707 get_last_value_validate. */
8708 if (fmt[i] == 'e')
8709 update_table_tick (XEXP (x, i));
8710 }
8711
8712 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
8713 are saying that the register is clobbered and we no longer know its
8714 value. If INSN is zero, don't update reg_last_set; this is only permitted
8715 with VALUE also zero and is used to invalidate the register. */
8716
8717 static void
8718 record_value_for_reg (reg, insn, value)
8719 rtx reg;
8720 rtx insn;
8721 rtx value;
8722 {
8723 int regno = REGNO (reg);
8724 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8725 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
8726 int i;
8727
8728 /* If VALUE contains REG and we have a previous value for REG, substitute
8729 the previous value. */
8730 if (value && insn && reg_overlap_mentioned_p (reg, value))
8731 {
8732 rtx tem;
8733
8734 /* Set things up so get_last_value is allowed to see anything set up to
8735 our insn. */
8736 subst_low_cuid = INSN_CUID (insn);
8737 tem = get_last_value (reg);
8738
8739 if (tem)
8740 value = replace_rtx (copy_rtx (value), reg, tem);
8741 }
8742
8743 /* For each register modified, show we don't know its value, that
8744 its value has been updated, and that we don't know the location of
8745 the death of the register. */
8746 for (i = regno; i < endregno; i++)
8747 {
8748 if (insn)
8749 reg_last_set[i] = insn;
8750 reg_last_set_value[i] = 0;
8751 reg_last_death[i] = 0;
8752 }
8753
8754 /* Mark registers that are being referenced in this value. */
8755 if (value)
8756 update_table_tick (value);
8757
8758 /* Now update the status of each register being set.
8759 If someone is using this register in this block, set this register
8760 to invalid since we will get confused between the two lives in this
8761 basic block. This makes using this register always invalid. In cse, we
8762 scan the table to invalidate all entries using this register, but this
8763 is too much work for us. */
8764
8765 for (i = regno; i < endregno; i++)
8766 {
8767 reg_last_set_label[i] = label_tick;
8768 if (value && reg_last_set_table_tick[i] == label_tick)
8769 reg_last_set_invalid[i] = 1;
8770 else
8771 reg_last_set_invalid[i] = 0;
8772 }
8773
8774 /* The value being assigned might refer to X (like in "x++;"). In that
8775 case, we must replace it with (clobber (const_int 0)) to prevent
8776 infinite loops. */
8777 if (value && ! get_last_value_validate (&value,
8778 reg_last_set_label[regno], 0))
8779 {
8780 value = copy_rtx (value);
8781 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
8782 value = 0;
8783 }
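/* For instance (editorial note): after "x = x + 1" the recorded
value (plus x 1) mentions X itself; that self-reference is what the
CLOBBER replacement above guards against. */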
8784
8785 /* For the main register being modified, update the value. */
8786 reg_last_set_value[regno] = value;
8787
8788 }
8789
8790 /* Used for communication between the following two routines. */
8791 static rtx record_dead_insn;
8792
8793 /* Called via note_stores from record_dead_and_set_regs to handle one
8794 SET or CLOBBER in an insn. */
8795
8796 static void
8797 record_dead_and_set_regs_1 (dest, setter)
8798 rtx dest, setter;
8799 {
8800 if (GET_CODE (dest) == REG)
8801 {
8802 /* If we are setting the whole register, we know its value. Otherwise
8803 show that we don't know the value. We can handle SUBREG in
8804 some cases. */
8805 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
8806 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
8807 else if (GET_CODE (setter) == SET
8808 && GET_CODE (SET_DEST (setter)) == SUBREG
8809 && SUBREG_REG (SET_DEST (setter)) == dest
8810 && subreg_lowpart_p (SET_DEST (setter)))
8811 record_value_for_reg (dest, record_dead_insn,
8812 gen_lowpart_for_combine (GET_MODE (dest),
8813 SET_SRC (setter)));
8814 else
8815 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
8816 }
8817 else if (GET_CODE (dest) == MEM
8818 /* Ignore pushes, they clobber nothing. */
8819 && ! push_operand (dest, GET_MODE (dest)))
8820 mem_last_set = INSN_CUID (record_dead_insn);
8821 }
8822
8823 /* Update the records of when each REG was most recently set or killed
8824 for the things done by INSN. This is the last thing done in processing
8825 INSN in the combiner loop.
8826
8827 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
8828 similar information mem_last_set (which insn most recently modified memory)
8829 and last_call_cuid (which insn was the most recent subroutine call). */
8830
8831 static void
8832 record_dead_and_set_regs (insn)
8833 rtx insn;
8834 {
8835 register rtx link;
8836 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
8837 {
8838 if (REG_NOTE_KIND (link) == REG_DEAD)
8839 reg_last_death[REGNO (XEXP (link, 0))] = insn;
8840 else if (REG_NOTE_KIND (link) == REG_INC)
8841 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
8842 }
8843
8844 if (GET_CODE (insn) == CALL_INSN)
8845 last_call_cuid = mem_last_set = INSN_CUID (insn);
8846
8847 record_dead_insn = insn;
8848 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
8849 }
8850 \f
8851 /* Utility routine for the following function. Verify that all the registers
8852 mentioned in *LOC are valid when *LOC was part of a value set when
8853 label_tick == TICK. Return 0 if some are not.
8854
8855 If REPLACE is non-zero, replace the invalid reference with
8856 (clobber (const_int 0)) and return 1. This replacement is useful because
8857 we often can get useful information about the form of a value (e.g., if
8858 it was produced by a shift that always produces -1 or 0) even though
8859 we don't know exactly what registers it was produced from. */
8860
8861 static int
8862 get_last_value_validate (loc, tick, replace)
8863 rtx *loc;
8864 int tick;
8865 int replace;
8866 {
8867 rtx x = *loc;
8868 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
8869 int len = GET_RTX_LENGTH (GET_CODE (x));
8870 int i;
8871
8872 if (GET_CODE (x) == REG)
8873 {
8874 int regno = REGNO (x);
8875 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8876 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8877 int j;
8878
8879 for (j = regno; j < endregno; j++)
8880 if (reg_last_set_invalid[j]
8881 /* If this is a pseudo-register that was only set once, it is
8882 always valid. */
8883 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
8884 && reg_last_set_label[j] > tick))
8885 {
8886 if (replace)
8887 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8888 return replace;
8889 }
8890
8891 return 1;
8892 }
8893
8894 for (i = 0; i < len; i++)
8895 if ((fmt[i] == 'e'
8896 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
8897 /* Don't bother with these. They shouldn't occur anyway. */
8898 || fmt[i] == 'E')
8899 return 0;
8900
8901 /* If we haven't found a reason for it to be invalid, it is valid. */
8902 return 1;
8903 }
8904
8905 /* Get the last value assigned to X, if known. Some registers
8906 in the value may be replaced with (clobber (const_int 0)) if their value
8907 is no longer known reliably. */
8908
8909 static rtx
8910 get_last_value (x)
8911 rtx x;
8912 {
8913 int regno;
8914 rtx value;
8915
8916 /* If this is a non-paradoxical SUBREG, get the value of its operand and
8917 then convert it to the desired mode. If this is a paradoxical SUBREG,
8918 we cannot predict what values the "extra" bits might have. */
8919 if (GET_CODE (x) == SUBREG
8920 && subreg_lowpart_p (x)
8921 && (GET_MODE_SIZE (GET_MODE (x))
8922 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8923 && (value = get_last_value (SUBREG_REG (x))) != 0)
8924 return gen_lowpart_for_combine (GET_MODE (x), value);
8925
8926 if (GET_CODE (x) != REG)
8927 return 0;
8928
8929 regno = REGNO (x);
8930 value = reg_last_set_value[regno];
8931
8932 /* If we don't have a value or if it isn't for this basic block, return 0. */
8933
8934 if (value == 0
8935 || (reg_n_sets[regno] != 1
8936 && (reg_last_set_label[regno] != label_tick)))
8937 return 0;
8938
8939 /* If the value was set in a later insn than the ones we are processing,
8940 we can't use it even if the register was only set once, but make a quick
8941 check to see if the previous insn set it to something. This is commonly
8942 the case when the same pseudo is used by repeated insns. */
8943
8944 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
8945 {
8946 rtx insn, set;
8947
8948 for (insn = prev_nonnote_insn (subst_insn);
8949 insn && INSN_CUID (insn) >= subst_low_cuid;
8950 insn = prev_nonnote_insn (insn))
8951 ;
8952
8953 if (insn
8954 && (set = single_set (insn)) != 0
8955 && rtx_equal_p (SET_DEST (set), x))
8956 {
8957 value = SET_SRC (set);
8958
8959 /* Make sure that VALUE doesn't reference X. Replace any
8960 explicit references with a CLOBBER. If there are any remaining
8961 references (rare), don't use the value. */
8962
8963 if (reg_mentioned_p (x, value))
8964 value = replace_rtx (copy_rtx (value), x,
8965 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
8966
8967 if (reg_overlap_mentioned_p (x, value))
8968 return 0;
8969 }
8970 else
8971 return 0;
8972 }
8973
8974 /* If the value has all its registers valid, return it. */
8975 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
8976 return value;
8977
8978 /* Otherwise, make a copy and replace any invalid register with
8979 (clobber (const_int 0)). If that fails for some reason, return 0. */
8980
8981 value = copy_rtx (value);
8982 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
8983 return value;
8984
8985 return 0;
8986 }
8987 \f
8988 /* Return nonzero if expression X refers to a REG or to memory
8989 that is set in an instruction more recent than FROM_CUID. */
8990
8991 static int
8992 use_crosses_set_p (x, from_cuid)
8993 register rtx x;
8994 int from_cuid;
8995 {
8996 register char *fmt;
8997 register int i;
8998 register enum rtx_code code = GET_CODE (x);
8999
9000 if (code == REG)
9001 {
9002 register int regno = REGNO (x);
9003 #ifdef PUSH_ROUNDING
9004 /* Don't allow uses of the stack pointer to be moved,
9005 because we don't know whether the move crosses a push insn. */
9006 if (regno == STACK_POINTER_REGNUM)
9007 return 1;
9008 #endif
9009 return (reg_last_set[regno]
9010 && INSN_CUID (reg_last_set[regno]) > from_cuid);
9011 }
9012
9013 if (code == MEM && mem_last_set > from_cuid)
9014 return 1;
9015
9016 fmt = GET_RTX_FORMAT (code);
9017
9018 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9019 {
9020 if (fmt[i] == 'E')
9021 {
9022 register int j;
9023 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9024 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
9025 return 1;
9026 }
9027 else if (fmt[i] == 'e'
9028 && use_crosses_set_p (XEXP (x, i), from_cuid))
9029 return 1;
9030 }
9031 return 0;
9032 }
9033 \f
9034 /* Define three variables used for communication between the following
9035 routines. */
9036
9037 static int reg_dead_regno, reg_dead_endregno;
9038 static int reg_dead_flag;
9039
9040 /* Function called via note_stores from reg_dead_at_p.
9041
9042 If DEST is within [reg_dead_regno, reg_dead_endregno), set
9043 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
9044
9045 static void
9046 reg_dead_at_p_1 (dest, x)
9047 rtx dest;
9048 rtx x;
9049 {
9050 int regno, endregno;
9051
9052 if (GET_CODE (dest) != REG)
9053 return;
9054
9055 regno = REGNO (dest);
9056 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9057 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
9058
9059 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
9060 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
9061 }
9062
9063 /* Return non-zero if REG is known to be dead at INSN.
9064
9065 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
9066 referencing REG, it is dead. If we hit a SET referencing REG, it is
9067 live. Otherwise, see if it is live or dead at the start of the basic
9068 block we are in. */
9069
9070 static int
9071 reg_dead_at_p (reg, insn)
9072 rtx reg;
9073 rtx insn;
9074 {
9075 int block, i;
9076
9077 /* Set variables for reg_dead_at_p_1. */
9078 reg_dead_regno = REGNO (reg);
9079 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
9080 ? HARD_REGNO_NREGS (reg_dead_regno,
9081 GET_MODE (reg))
9082 : 1);
9083
9084 reg_dead_flag = 0;
9085
9086 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
9087 beginning of function. */
9088 for (; insn && GET_CODE (insn) != CODE_LABEL;
9089 insn = prev_nonnote_insn (insn))
9090 {
9091 note_stores (PATTERN (insn), reg_dead_at_p_1);
9092 if (reg_dead_flag)
9093 return reg_dead_flag == 1 ? 1 : 0;
9094
9095 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
9096 return 1;
9097 }
9098
9099 /* Get the basic block number that we were in. */
9100 if (insn == 0)
9101 block = 0;
9102 else
9103 {
9104 for (block = 0; block < n_basic_blocks; block++)
9105 if (insn == basic_block_head[block])
9106 break;
9107
9108 if (block == n_basic_blocks)
9109 return 0;
9110 }
9111
9112 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
9113 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
9114 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
9115 return 0;
9116
9117 return 1;
9118 }
9119 \f
9120 /* Remove register number REGNO from the dead registers list of INSN.
9121
9122 Return the note used to record the death, if there was one. */
9123
9124 rtx
9125 remove_death (regno, insn)
9126 int regno;
9127 rtx insn;
9128 {
9129 register rtx note = find_regno_note (insn, REG_DEAD, regno);
9130
9131 if (note)
9132 {
9133 reg_n_deaths[regno]--;
9134 remove_note (insn, note);
9135 }
9136
9137 return note;
9138 }
9139
9140 /* For each register (hardware or pseudo) used within expression X, if its
9141 death is in an instruction with cuid between FROM_CUID (inclusive) and
9142 TO_INSN (exclusive), put a REG_DEAD note for that register in the
9143 list headed by PNOTES.
9144
9145 This is done when X is being merged by combination into TO_INSN. These
9146 notes will then be distributed as needed. */
9147
9148 static void
9149 move_deaths (x, from_cuid, to_insn, pnotes)
9150 rtx x;
9151 int from_cuid;
9152 rtx to_insn;
9153 rtx *pnotes;
9154 {
9155 register char *fmt;
9156 register int len, i;
9157 register enum rtx_code code = GET_CODE (x);
9158
9159 if (code == REG)
9160 {
9161 register int regno = REGNO (x);
9162 register rtx where_dead = reg_last_death[regno];
9163
9164 if (where_dead && INSN_CUID (where_dead) >= from_cuid
9165 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
9166 {
9167 rtx note = remove_death (regno, reg_last_death[regno]);
9168
9169 /* It is possible for the call above to return 0. This can occur
9170 when reg_last_death points to I2 or I1 that we combined with.
9171 In that case make a new note. */
9172
9173 if (note)
9174 {
9175 XEXP (note, 1) = *pnotes;
9176 *pnotes = note;
9177 }
9178 else
9179 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
9180
9181 reg_n_deaths[regno]++;
9182 }
9183
9184 return;
9185 }
9186
9187 else if (GET_CODE (x) == SET)
9188 {
9189 rtx dest = SET_DEST (x);
9190
9191 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
9192
9193 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
9194 that accesses one word of a multi-word item, some
9195 piece of every register in the expression is used by
9196 this insn, so remove any old death. */
9197
9198 if (GET_CODE (dest) == ZERO_EXTRACT
9199 || GET_CODE (dest) == STRICT_LOW_PART
9200 || (GET_CODE (dest) == SUBREG
9201 && (((GET_MODE_SIZE (GET_MODE (dest))
9202 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
9203 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
9204 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
9205 {
9206 move_deaths (dest, from_cuid, to_insn, pnotes);
9207 return;
9208 }
9209
9210 /* If this is some other SUBREG, we know it replaces the entire
9211 value, so use that as the destination. */
9212 if (GET_CODE (dest) == SUBREG)
9213 dest = SUBREG_REG (dest);
9214
9215 /* If this is a MEM, adjust deaths of anything used in the address.
9216 For a REG (the only other possibility), the entire value is
9217 being replaced so the old value is not used in this insn. */
9218
9219 if (GET_CODE (dest) == MEM)
9220 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
9221 return;
9222 }
9223
9224 else if (GET_CODE (x) == CLOBBER)
9225 return;
9226
9227 len = GET_RTX_LENGTH (code);
9228 fmt = GET_RTX_FORMAT (code);
9229
9230 for (i = 0; i < len; i++)
9231 {
9232 if (fmt[i] == 'E')
9233 {
9234 register int j;
9235 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9236 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
9237 }
9238 else if (fmt[i] == 'e')
9239 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
9240 }
9241 }
9242 \f
9243 /* Return 1 if X is the target of a bit-field assignment in BODY, the
9244 pattern of an insn. X must be a REG. */
9245
9246 static int
9247 reg_bitfield_target_p (x, body)
9248 rtx x;
9249 rtx body;
9250 {
9251 int i;
9252
9253 if (GET_CODE (body) == SET)
9254 {
9255 rtx dest = SET_DEST (body);
9256 rtx target;
9257 int regno, tregno, endregno, endtregno;
9258
9259 if (GET_CODE (dest) == ZERO_EXTRACT)
9260 target = XEXP (dest, 0);
9261 else if (GET_CODE (dest) == STRICT_LOW_PART)
9262 target = SUBREG_REG (XEXP (dest, 0));
9263 else
9264 return 0;
9265
9266 if (GET_CODE (target) == SUBREG)
9267 target = SUBREG_REG (target);
9268
9269 if (GET_CODE (target) != REG)
9270 return 0;
9271
9272 tregno = REGNO (target), regno = REGNO (x);
9273 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
9274 return target == x;
9275
9276 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
9277 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9278
9279 return endregno > tregno && regno < endtregno;
9280 }
9281
9282 else if (GET_CODE (body) == PARALLEL)
9283 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
9284 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
9285 return 1;
9286
9287 return 0;
9288 }
9289 \f
9290 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
9291 as appropriate. I3 and I2 are the insns resulting from the combination
9292 insns including FROM (I2 may be zero).
9293
9294 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
9295 not need REG_DEAD notes because they are being substituted for. This
9296 saves searching in the most common cases.
9297
9298 Each note in the list is either ignored or placed on some insns, depending
9299 on the type of note. */
9300
9301 static void
9302 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
9303 rtx notes;
9304 rtx from_insn;
9305 rtx i3, i2;
9306 rtx elim_i2, elim_i1;
9307 {
9308 rtx note, next_note;
9309 rtx tem;
9310
9311 for (note = notes; note; note = next_note)
9312 {
9313 rtx place = 0, place2 = 0;
9314
9315 /* If this NOTE references a pseudo register, ensure it references
9316 the latest copy of that register. */
9317 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
9318 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
9319 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
9320
9321 next_note = XEXP (note, 1);
9322 switch (REG_NOTE_KIND (note))
9323 {
9324 case REG_UNUSED:
9325 /* If this register is set or clobbered in I3, put the note there
9326 unless there is one already. */
9327 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
9328 {
9329 if (! (GET_CODE (XEXP (note, 0)) == REG
9330 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
9331 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
9332 place = i3;
9333 }
9334 /* Otherwise, if this register is used by I3, then this register
9335 now dies here, so we must put a REG_DEAD note here unless there
9336 is one already. */
9337 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
9338 && ! (GET_CODE (XEXP (note, 0)) == REG
9339 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
9340 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
9341 {
9342 PUT_REG_NOTE_KIND (note, REG_DEAD);
9343 place = i3;
9344 }
9345 break;
9346
9347 case REG_EQUAL:
9348 case REG_EQUIV:
9349 case REG_NONNEG:
9350 /* These notes say something about results of an insn. We can
9351 only support them if they used to be on I3 in which case they
9352 remain on I3. Otherwise they are ignored.
9353
9354 If the note refers to an expression that is not a constant, we
9355 must also ignore the note since we cannot tell whether the
9356 equivalence is still true. It might be possible to do
9357 slightly better than this (we only have a problem if I2DEST
9358 or I1DEST is present in the expression), but it doesn't
9359 seem worth the trouble. */
9360
9361 if (from_insn == i3
9362 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
9363 place = i3;
9364 break;
9365
9366 case REG_INC:
9367 case REG_NO_CONFLICT:
9368 case REG_LABEL:
9369 /* These notes say something about how a register is used. They must
9370 be present on any use of the register in I2 or I3. */
9371 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
9372 place = i3;
9373
9374 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
9375 {
9376 if (place)
9377 place2 = i2;
9378 else
9379 place = i2;
9380 }
9381 break;
9382
9383 case REG_WAS_0:
9384 /* It is too much trouble to try to see if this note is still
9385 correct in all situations. It is better to simply delete it. */
9386 break;
9387
9388 case REG_RETVAL:
9389 /* If the insn previously containing this note still exists,
9390 put it back where it was. Otherwise move it to the previous
9391 insn. Adjust the corresponding REG_LIBCALL note. */
9392 if (GET_CODE (from_insn) != NOTE)
9393 place = from_insn;
9394 else
9395 {
9396 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
9397 place = prev_real_insn (from_insn);
9398 if (tem && place)
9399 XEXP (tem, 0) = place;
9400 }
9401 break;
9402
9403 case REG_LIBCALL:
9404 /* This is handled similarly to REG_RETVAL. */
9405 if (GET_CODE (from_insn) != NOTE)
9406 place = from_insn;
9407 else
9408 {
9409 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
9410 place = next_real_insn (from_insn);
9411 if (tem && place)
9412 XEXP (tem, 0) = place;
9413 }
9414 break;
9415
9416 case REG_DEAD:
9417 /* If the register is used as an input in I3, it dies there.
9418 Similarly for I2, if it is non-zero and adjacent to I3.
9419
9420 If the register is not used as an input in either I3 or I2
9421 and it is not one of the registers we were supposed to eliminate,
9422 there are two possibilities. We might have a non-adjacent I2
9423 or we might have somehow eliminated an additional register
9424 from a computation. For example, we might have had A & B where
9425 we discover that B will always be zero. In this case we will
9426 eliminate the reference to A.
9427
9428 In both cases, we must search to see if we can find a previous
9429 use of A and put the death note there. */
9430
9431 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
9432 place = i3;
9433 else if (i2 != 0 && next_nonnote_insn (i2) == i3
9434 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9435 place = i2;
9436
9437 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
9438 break;
9439
9440 /* If the register is used in both I2 and I3 and it dies in I3,
9441 we might have added another reference to it. If reg_n_refs
9442 was 2, bump it to 3. This has to be correct since the
9443 register must have been set somewhere. The reason this is
9444 done is because local-alloc.c treats 2 references as a
9445 special case. */
9446
9447 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
9448 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
9449 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9450 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
9451
9452 if (place == 0)
9453 for (tem = prev_nonnote_insn (i3);
9454 tem && (GET_CODE (tem) == INSN
9455 || GET_CODE (tem) == CALL_INSN);
9456 tem = prev_nonnote_insn (tem))
9457 {
9458 /* If the register is being set at TEM, see if that is all
9459 TEM is doing. If so, delete TEM. Otherwise, make this
9460 into a REG_UNUSED note instead. */
9461 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
9462 {
9463 rtx set = single_set (tem);
9464
9465 /* Verify that it was the set, and not a clobber that
9466 modified the register. */
9467
9468 if (set != 0 && ! side_effects_p (SET_SRC (set))
9469 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
9470 {
9471 /* Move the notes and links of TEM elsewhere.
9472 This might delete other dead insns recursively.
9473 First set the pattern to something that won't use
9474 any register. */
9475
9476 PATTERN (tem) = pc_rtx;
9477
9478 distribute_notes (REG_NOTES (tem), tem, tem,
9479 NULL_RTX, NULL_RTX, NULL_RTX);
9480 distribute_links (LOG_LINKS (tem));
9481
9482 PUT_CODE (tem, NOTE);
9483 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
9484 NOTE_SOURCE_FILE (tem) = 0;
9485 }
9486 else
9487 {
9488 PUT_REG_NOTE_KIND (note, REG_UNUSED);
9489
9490 /* If there isn't already a REG_UNUSED note, put one
9491 here. */
9492 if (! find_regno_note (tem, REG_UNUSED,
9493 REGNO (XEXP (note, 0))))
9494 place = tem;
9495 break;
9496 }
9497 }
9498 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
9499 {
9500 place = tem;
9501 break;
9502 }
9503 }
9504
9505 /* If the register is set or already dead at PLACE, we needn't do
9506 anything with this note if it is still a REG_DEAD note.
9507
9508 Note that we cannot use just `dead_or_set_p' here since we can
9509 convert an assignment to a register into a bit-field assignment.
9510 Therefore, we must also omit the note if the register is the
9511 target of a bitfield assignment. */
9512
9513 if (place && REG_NOTE_KIND (note) == REG_DEAD)
9514 {
9515 int regno = REGNO (XEXP (note, 0));
9516
9517 if (dead_or_set_p (place, XEXP (note, 0))
9518 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
9519 {
9520 /* Unless the register previously died in PLACE, clear
9521 reg_last_death. [I no longer understand why this is
9522 being done.] */
9523 if (reg_last_death[regno] != place)
9524 reg_last_death[regno] = 0;
9525 place = 0;
9526 }
9527 else
9528 reg_last_death[regno] = place;
9529
9530 /* If this is a death note for a hard reg that is occupying
9531 multiple registers, ensure that we are still using all
9532 parts of the object. If we find a piece of the object
9533 that is unused, we must add a USE for that piece before
9534 PLACE and put the appropriate REG_DEAD note on it.
9535
9536 An alternative would be to put a REG_UNUSED for the pieces
9537 on the insn that set the register, but that can't be done if
9538 it is not in the same block. It is simpler, though less
9539 efficient, to add the USE insns. */
9540
9541 if (place && regno < FIRST_PSEUDO_REGISTER
9542 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
9543 {
9544 int endregno
9545 = regno + HARD_REGNO_NREGS (regno,
9546 GET_MODE (XEXP (note, 0)));
9547 int all_used = 1;
9548 int i;
9549
9550 for (i = regno; i < endregno; i++)
9551 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
9552 {
9553 rtx piece = gen_rtx (REG, word_mode, i);
9554 rtx p;
9555
9556 /* See if we already placed a USE note for this
9557 register in front of PLACE. */
9558 for (p = place;
9559 GET_CODE (PREV_INSN (p)) == INSN
9560 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
9561 p = PREV_INSN (p))
9562 if (rtx_equal_p (piece,
9563 XEXP (PATTERN (PREV_INSN (p)), 0)))
9564 {
9565 p = 0;
9566 break;
9567 }
9568
9569 if (p)
9570 {
9571 rtx use_insn
9572 = emit_insn_before (gen_rtx (USE, VOIDmode,
9573 piece),
9574 p);
9575 REG_NOTES (use_insn)
9576 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
9577 REG_NOTES (use_insn));
9578 }
9579
9580 all_used = 0;
9581 }
9582
9583 if (! all_used)
9584 {
9585 /* Put only REG_DEAD notes for pieces that are
9586 still used and that are not already dead or set. */
9587
9588 for (i = regno; i < endregno; i++)
9589 {
9590 rtx piece = gen_rtx (REG, word_mode, i);
9591
9592 if (reg_referenced_p (piece, PATTERN (place))
9593 && ! dead_or_set_p (place, piece)
9594 && ! reg_bitfield_target_p (piece,
9595 PATTERN (place)))
9596 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
9597 piece,
9598 REG_NOTES (place));
9599 }
9600
9601 place = 0;
9602 }
9603 }
9604 }
9605 break;
9606
9607 default:
9608 /* Any other notes should not be present at this point in the
9609 compilation. */
9610 abort ();
9611 }
9612
9613 if (place)
9614 {
9615 XEXP (note, 1) = REG_NOTES (place);
9616 REG_NOTES (place) = note;
9617 }
9618 else if ((REG_NOTE_KIND (note) == REG_DEAD
9619 || REG_NOTE_KIND (note) == REG_UNUSED)
9620 && GET_CODE (XEXP (note, 0)) == REG)
9621 reg_n_deaths[REGNO (XEXP (note, 0))]--;
9622
9623 if (place2)
9624 {
9625 if ((REG_NOTE_KIND (note) == REG_DEAD
9626 || REG_NOTE_KIND (note) == REG_UNUSED)
9627 && GET_CODE (XEXP (note, 0)) == REG)
9628 reg_n_deaths[REGNO (XEXP (note, 0))]++;
9629
9630 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
9631 XEXP (note, 0), REG_NOTES (place2));
9632 }
9633 }
9634 }
9635 \f
9636 /* Similarly to above, distribute the LOG_LINKS that used to be present on
9637 I3, I2, and I1 to new locations. This is also called in one case to
9638 add a link pointing at I3 when I3's destination is changed. */
9639
9640 static void
9641 distribute_links (links)
9642 rtx links;
9643 {
9644 rtx link, next_link;
9645
9646 for (link = links; link; link = next_link)
9647 {
9648 rtx place = 0;
9649 rtx insn;
9650 rtx set, reg;
9651
9652 next_link = XEXP (link, 1);
9653
9654 /* If the insn that this link points to is a NOTE or isn't a single
9655 set, ignore it. In the latter case, it isn't clear what we
9656 can do other than ignore the link, since we can't tell which
9657 register it was for. Such links wouldn't be used by combine
9658 anyway.
9659
9660 It is not possible for the destination of the target of the link to
9661 have been changed by combine. The only way this could happen is if we
9662 replace I3, I2, and I1 by I3 and I2. But in that case the
9663 destination of I2 also remains unchanged. */
9664
9665 if (GET_CODE (XEXP (link, 0)) == NOTE
9666 || (set = single_set (XEXP (link, 0))) == 0)
9667 continue;
9668
9669 reg = SET_DEST (set);
9670 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
9671 || GET_CODE (reg) == SIGN_EXTRACT
9672 || GET_CODE (reg) == STRICT_LOW_PART)
9673 reg = XEXP (reg, 0);
9674
9675 /* A LOG_LINK is defined as being placed on the first insn that uses
9676 a register and points to the insn that sets the register. Start
9677 searching at the next insn after the target of the link and stop
9678 when we reach a set of the register or the end of the basic block.
9679
9680 Note that this correctly handles the link that used to point from
9681 I3 to I2. Also note that not much searching is typically done here
9682 since most links don't point very far away. */
9683
9684 for (insn = NEXT_INSN (XEXP (link, 0));
9685 (insn && GET_CODE (insn) != CODE_LABEL
9686 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
9687 insn = NEXT_INSN (insn))
9688 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
9689 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
9690 {
9691 if (reg_referenced_p (reg, PATTERN (insn)))
9692 place = insn;
9693 break;
9694 }
9695
9696 /* If we found a place to put the link, place it there unless there
9697 is already a link to the same insn as LINK at that point. */
9698
9699 if (place)
9700 {
9701 rtx link2;
9702
9703 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
9704 if (XEXP (link2, 0) == XEXP (link, 0))
9705 break;
9706
9707 if (link2 == 0)
9708 {
9709 XEXP (link, 1) = LOG_LINKS (place);
9710 LOG_LINKS (place) = link;
9711 }
9712 }
9713 }
9714 }
9715 \f
9716 void
9717 dump_combine_stats (file)
9718 FILE *file;
9719 {
9720 fprintf
9721 (file,
9722 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
9723 combine_attempts, combine_merges, combine_extras, combine_successes);
9724 }
9725
9726 void
9727 dump_combine_total_stats (file)
9728 FILE *file;
9729 {
9730 fprintf
9731 (file,
9732 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
9733 total_attempts, total_merges, total_extras, total_successes);
9734 }