1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
24 #include "hard-reg-set.h"
27 #include "insn-config.h"
33 /* The basic idea of common subexpression elimination is to go
34 through the code, keeping a record of expressions that would
35 have the same value at the current scan point, and replacing
36 expressions encountered with the cheapest equivalent expression.
38 It is too complicated to keep track of the different possibilities
39 when control paths merge; so, at each label, we forget all that is
40 known and start fresh. This can be described as processing each
41 basic block separately. Note, however, that these are not quite
42 the same as the basic blocks found by a later pass and used for
43 data flow analysis and register packing. We do not need to start fresh
44 after a conditional jump instruction if there is no label there.
46 We use two data structures to record the equivalent expressions:
47 a hash table for most expressions, and several vectors together
48 with "quantity numbers" to record equivalent (pseudo) registers.
50 The use of the special data structure for registers is desirable
51 because it is faster. It is possible because register references
52 contain a fairly small number, the register number, taken from
53 a contiguously allocated series, and two register references are
54 identical if they have the same number. General expressions
55 do not have any such thing, so the only way to retrieve the
56 information recorded on an expression other than a register
57 is to keep it in a hash table.
59 Registers and "quantity numbers":
61 At the start of each basic block, all of the (hardware and pseudo)
62 registers used in the function are given distinct quantity
63 numbers to indicate their contents. During scan, when the code
64 copies one register into another, we copy the quantity number.
65 When a register is loaded in any other way, we allocate a new
66 quantity number to describe the value generated by this operation.
67 `reg_qty' records what quantity a register is currently thought
70 All real quantity numbers are greater than or equal to `max_reg'.
71 If register N has not been assigned a quantity, reg_qty[N] will equal N.
73 Quantity numbers below `max_reg' do not exist and none of the `qty_...'
74 variables should be referenced with an index below `max_reg'.
76 We also maintain a bidirectional chain of registers for each
77 quantity number. `qty_first_reg', `qty_last_reg',
78 `reg_next_eqv' and `reg_prev_eqv' hold these chains.
80 The first register in a chain is the one whose lifespan is least local.
81 Among equals, it is the one that was seen first.
82 We replace any equivalent register with that one.
84 If two registers have the same quantity number, it must be true that
85 REG expressions with `qty_mode' must be in the hash table for both
86 registers and must be in the same class.
88 The converse is not true. Since hard registers may be referenced in
89 any mode, two REG expressions might be equivalent in the hash table
90 but not have the same quantity number if the quantity number of one
91 of the registers is not the same mode as those expressions.
93 Constants and quantity numbers
95 When a quantity has a known constant value, that value is stored
96 in the appropriate element of qty_const. This is in addition to
97 putting the constant in the hash table as is usual for non-regs.
99 Whether a reg or a constant is preferred is determined by the configuration
100 macro CONST_COSTS and will often depend on the constant value. In any
101 event, expressions containing constants can be simplified, by fold_rtx.
103 When a quantity has a known nearly constant value (such as an address
104 of a stack slot), that value is stored in the appropriate element
107 Integer constants don't have a machine mode. However, cse
108 determines the intended machine mode from the destination
109 of the instruction that moves the constant. The machine mode
110 is recorded in the hash table along with the actual RTL
111 constant expression so that different modes are kept separate.
115 To record known equivalences among expressions in general
116 we use a hash table called `table'. It has a fixed number of buckets
117 that contain chains of `struct table_elt' elements for expressions.
118 These chains connect the elements whose expressions have the same
121 Other chains through the same elements connect the elements which
122 currently have equivalent values.
124 Register references in an expression are canonicalized before hashing
125 the expression. This is done using `reg_qty' and `qty_first_reg'.
126 The hash code of a register reference is computed using the quantity
127 number, not the register number.
129 When the value of an expression changes, it is necessary to remove from the
130 hash table not just that expression but all expressions whose values
131 could be different as a result.
133 1. If the value changing is in memory, except in special cases
134 ANYTHING referring to memory could be changed. That is because
135 nobody knows where a pointer does not point.
136 The function `invalidate_memory' removes what is necessary.
138 The special cases are when the address is constant or is
139 a constant plus a fixed register such as the frame pointer
140 or a static chain pointer. When such addresses are stored in,
141 we can tell exactly which other such addresses must be invalidated
142 due to overlap. `invalidate' does this.
143 All expressions that refer to non-constant
144 memory addresses are also invalidated. `invalidate_memory' does this.
146 2. If the value changing is a register, all expressions
147 containing references to that register, and only those,
150 Because searching the entire hash table for expressions that contain
151 a register is very slow, we try to figure out when it isn't necessary.
152 Precisely, this is necessary only when expressions have been
153 entered in the hash table using this register, and then the value has
154 changed, and then another expression wants to be added to refer to
155 the register's new value. This sequence of circumstances is rare
156 within any one basic block.
158 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
159 reg_tick[i] is incremented whenever a value is stored in register i.
160 reg_in_table[i] holds -1 if no references to register i have been
161 entered in the table; otherwise, it contains the value reg_tick[i] had
162 when the references were entered. If we want to enter a reference
163 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
164 Until we want to enter a new entry, the mere fact that the two vectors
165 don't match makes the entries be ignored if anyone tries to match them.
167 Registers themselves are entered in the hash table as well as in
168 the equivalent-register chains. However, the vectors `reg_tick'
169 and `reg_in_table' do not apply to expressions which are simple
170 register references. These expressions are removed from the table
171 immediately when they become invalid, and this can be done even if
172 we do not immediately search for all the expressions that refer to
175 A CLOBBER rtx in an instruction invalidates its operand for further
176 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
177 invalidates everything that resides in memory.
181 Constant expressions that differ only by an additive integer
182 are called related. When a constant expression is put in
183 the table, the related expression with no constant term
184 is also entered. These are made to point at each other
185 so that it is possible to find out if there exists any
186 register equivalent to an expression related to a given expression. */
188 /* One plus largest register number used in this function. */
192 /* Length of vectors indexed by quantity number.
193 We know in advance we will not need a quantity number this big. */
197 /* Next quantity number to be allocated.
198 This is 1 + the largest number needed so far. */
202 /* Indexed by quantity number, gives the first (or last) (pseudo) register
203 in the chain of registers that currently contain this quantity. */
205 static int *qty_first_reg
;
206 static int *qty_last_reg
;
208 /* Index by quantity number, gives the mode of the quantity. */
210 static enum machine_mode
*qty_mode
;
212 /* Indexed by quantity number, gives the rtx of the constant value of the
213 quantity, or zero if it does not have a known value.
214 A sum of the frame pointer (or arg pointer) plus a constant
215 can also be entered here. */
217 static rtx
*qty_const
;
219 /* Indexed by qty number, gives the insn that stored the constant value
220 recorded in `qty_const'. */
222 static rtx
*qty_const_insn
;
224 /* The next three variables are used to track when a comparison between a
225 quantity and some constant or register has been passed. In that case, we
226 know the results of the comparison in case we see it again. These variables
227 record a comparison that is known to be true. */
229 /* Indexed by qty number, gives the rtx code of a comparison with a known
230 result involving this quantity. If none, it is UNKNOWN. */
231 static enum rtx_code
*qty_comparison_code
;
233 /* Indexed by qty number, gives the constant being compared against in a
234 comparison of known result. If no such comparison, it is undefined.
235 If the comparison is not with a constant, it is zero. */
237 static rtx
*qty_comparison_const
;
239 /* Indexed by qty number, gives the quantity being compared against in a
240 comparison of known result. If no such comparison, it is undefined.
241 If the comparison is not with a register, it is -1. */
243 static int *qty_comparison_qty
;
246 /* For machines that have a CC0, we do not record its value in the hash
247 table since its use is guaranteed to be the insn immediately following
248 its definition and any other insn is presumed to invalidate it.
250 Instead, we store below the value last assigned to CC0. If it should
251 happen to be a constant, it is stored in preference to the actual
252 assigned value. In case it is a constant, we store the mode in which
253 the constant should be interpreted. */
255 static rtx prev_insn_cc0
;
256 static enum machine_mode prev_insn_cc0_mode
;
259 /* Previous actual insn. 0 if at first insn of basic block. */
261 static rtx prev_insn
;
263 /* Insn being scanned. */
265 static rtx this_insn
;
267 /* Index by (pseudo) register number, gives the quantity number
268 of the register's current contents. */
272 /* Index by (pseudo) register number, gives the number of the next (or
273 previous) (pseudo) register in the chain of registers sharing the same
276 Or -1 if this register is at the end of the chain.
278 If reg_qty[N] == N, reg_next_eqv[N] is undefined. */
280 static int *reg_next_eqv
;
281 static int *reg_prev_eqv
;
283 /* Index by (pseudo) register number, gives the number of times
284 that register has been altered in the current basic block. */
286 static int *reg_tick
;
288 /* Index by (pseudo) register number, gives the reg_tick value at which
289 rtx's containing this register are valid in the hash table.
290 If this does not equal the current reg_tick value, such expressions
291 existing in the hash table are invalid.
292 If this is -1, no expressions containing this register have been
293 entered in the table. */
295 static int *reg_in_table
;
297 /* A HARD_REG_SET containing all the hard registers for which there is
298 currently a REG expression in the hash table. Note the difference
299 from the above variables, which indicate if the REG is mentioned in some
300 expression in the table. */
302 static HARD_REG_SET hard_regs_in_table
;
304 /* A HARD_REG_SET containing all the hard registers that are invalidated
307 static HARD_REG_SET regs_invalidated_by_call
;
309 /* Two vectors of ints:
310 one containing max_reg -1's; the other max_reg + 500 (an approximation
311 for max_qty) elements where element i contains i.
312 These are used to initialize various other vectors fast. */
314 static int *all_minus_one
;
315 static int *consec_ints
;
317 /* CUID of insn that starts the basic block currently being cse-processed. */
319 static int cse_basic_block_start
;
321 /* CUID of insn that ends the basic block currently being cse-processed. */
323 static int cse_basic_block_end
;
325 /* Vector mapping INSN_UIDs to cuids.
326 The cuids are like uids but increase monotonically always.
327 We use them to see whether a reg is used outside a given basic block. */
329 static short *uid_cuid
;
/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
335 /* Nonzero if cse has altered conditional jump insns
336 in such a way that jump optimization should be redone. */
338 static int cse_jumps_altered
;
340 /* canon_hash stores 1 in do_not_record
341 if it notices a reference to CC0, PC, or some other volatile
344 static int do_not_record
;
346 /* canon_hash stores 1 in hash_arg_in_memory
347 if it notices a reference to memory within the expression being hashed. */
349 static int hash_arg_in_memory
;
351 /* canon_hash stores 1 in hash_arg_in_struct
352 if it notices a reference to memory that's part of a structure. */
354 static int hash_arg_in_struct
;
356 /* The hash table contains buckets which are chains of `struct table_elt's,
357 each recording one expression's information.
358 That expression is in the `exp' field.
360 Those elements with the same hash code are chained in both directions
361 through the `next_same_hash' and `prev_same_hash' fields.
363 Each set of expressions with equivalent values
364 are on a two-way chain through the `next_same_value'
365 and `prev_same_value' fields, and all point with
366 the `first_same_value' field at the first element in
367 that chain. The chain is in order of increasing cost.
368 Each element's cost value is in its `cost' field.
370 The `in_memory' field is nonzero for elements that
371 involve any reference to memory. These elements are removed
372 whenever a write is done to an unidentified location in memory.
373 To be safe, we assume that a memory address is unidentified unless
374 the address is either a symbol constant or a constant plus
375 the frame pointer or argument pointer.
377 The `in_struct' field is nonzero for elements that
378 involve any reference to memory inside a structure or array.
380 The `related_value' field is used to connect related expressions
381 (that differ by adding an integer).
382 The related expressions are chained in a circular fashion.
383 `related_value' is zero for expressions for which this
386 The `cost' field stores the cost of this element's expression.
388 The `is_const' flag is set if the element is a constant (including
391 The `flag' field is used as a temporary during some search routines.
393 The `mode' field is usually the same as GET_MODE (`exp'), but
394 if `exp' is a CONST_INT and has no machine mode then the `mode'
395 field is the mode it was being used as. Each constant is
396 recorded separately for each mode it is used with. */
402 struct table_elt
*next_same_hash
;
403 struct table_elt
*prev_same_hash
;
404 struct table_elt
*next_same_value
;
405 struct table_elt
*prev_same_value
;
406 struct table_elt
*first_same_value
;
407 struct table_elt
*related_value
;
409 enum machine_mode mode
;
418 /* We don't want a lot of buckets, because we rarely have very many
419 things stored in the hash table, and a lot of buckets slows
420 down a lot of loops that happen frequently. */
423 /* Compute hash code of X in mode M. Special-case case where X is a pseudo
424 register (hard registers may require `do_not_record' to be set). */
427 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
428 ? ((((int) REG << 7) + reg_qty[REGNO (X)]) % NBUCKETS) \
429 : canon_hash (X, M) % NBUCKETS)
/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || fixed_regs[N]) \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || fixed_regs[N])
#endif
/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers are the cheapest with a cost of 0.  Next come pseudos
   with a cost of one and other hard registers with a cost of 2.  Aside
   from these special cases, call `rtx_cost'.  */

#define COST(X)						\
  (GET_CODE (X) == REG					\
   ? (REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1		\
      : (FIXED_REGNO_P (REGNO (X))			\
	 && REGNO_REG_CLASS (REGNO (X)) != NO_REGS) ? 0	\
      : 2)						\
   : rtx_cost (X) * 2)
/* Determine if the quantity number for register N represents a valid index
   into the `qty_...' variables (i.e. the register has been assigned a real
   quantity; see the discussion of `reg_qty' above).  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
463 static struct table_elt
*table
[NBUCKETS
];
465 /* Chain of `struct table_elt's made so far for this function
466 but currently removed from the table. */
468 static struct table_elt
*free_element_chain
;
470 /* Number of `struct table_elt' structures made so far for this function. */
472 static int n_elements_made
;
474 /* Maximum value `n_elements_made' has had so far in this compilation
475 for functions previously processed. */
477 static int max_elements_made
;
479 /* Surviving equivalence class when two equivalence classes are merged
480 by recording the effects of a jump in the last insn. Zero if the
481 last insn was not a conditional jump. */
483 static struct table_elt
*last_jump_equiv_class
;
485 /* Set to the cost of a constant pool reference if one was found for a
486 symbolic constant. If this was found, it means we should try to
487 convert constants into constant pool entries if they don't fit in
490 static int constant_pool_entries_cost
;
492 /* Bits describing what kind of values in memory must be invalidated
493 for a particular instruction. If all three bits are zero,
494 no memory refs need to be invalidated. Each bit is more powerful
495 than the preceding ones, and if a bit is set then the preceding
498 Here is how the bits are set:
499 Pushing onto the stack invalidates only the stack pointer,
500 writing at a fixed address invalidates only variable addresses,
501 writing in a structure element at variable address
502 invalidates all but scalar variables,
503 and writing in anything else at variable address invalidates everything. */
507 int sp
: 1; /* Invalidate stack pointer. */
508 int var
: 1; /* Invalidate variable addresses. */
509 int nonscalar
: 1; /* Invalidate all but scalar variables. */
510 int all
: 1; /* Invalidate all memory refs. */
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == arg_pointer_rtx		\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx)))
/* Similar, but also allows reference to the stack pointer.  */

#define NONZERO_BASE_PLUS_P(X)					\
  (FIXED_BASE_PLUS_P (X)					\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)))
539 static struct table_elt
*lookup ();
540 static void free_element ();
542 static int insert_regs ();
543 static void rehash_using_reg ();
544 static void remove_invalid_refs ();
545 static int exp_equiv_p ();
547 int refers_to_mem_p ();
548 static void invalidate_from_clobbers ();
549 static int safe_hash ();
550 static int canon_hash ();
551 static rtx
fold_rtx ();
552 static rtx
equiv_constant ();
553 static void record_jump_cond ();
554 static void note_mem_written ();
555 static int cse_rtx_addr_varies_p ();
556 static enum rtx_code
find_comparison_args ();
557 static void cse_insn ();
558 static void cse_set_around_loop ();
560 /* Return an estimate of the cost of computing rtx X.
561 One use is in cse, to decide which expression to keep in the hash table.
562 Another is in rtl generation, to pick the cheapest way to multiply.
563 Other uses like the latter are expected in the future. */
/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
576 register enum rtx_code code
;
583 /* Compute the default costs of certain things.
584 Note that RTX_COSTS can override the defaults. */
590 /* Count multiplication by 2**n as a shift,
591 because if we are considering it, we would output it as a shift. */
592 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
593 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
596 total
= COSTS_N_INSNS (5);
602 total
= COSTS_N_INSNS (7);
605 /* Used in loop.c and combine.c as a marker. */
609 /* We don't want these to be used in substitutions because
610 we have no way of validating the resulting insn. So assign
611 anything containing an ASM_OPERANDS a very high cost. */
627 CONST_COSTS (x
, code
);
630 /* Sum the costs of the sub-rtx's, plus cost of this operation,
631 which is already in total. */
633 fmt
= GET_RTX_FORMAT (code
);
634 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
636 total
+= rtx_cost (XEXP (x
, i
));
637 else if (fmt
[i
] == 'E')
638 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
639 total
+= rtx_cost (XVECEXP (x
, i
, j
));
644 /* Clear the hash table and initialize each register with its own quantity,
645 for a new basic block. */
654 bzero (reg_tick
, max_reg
* sizeof (int));
656 bcopy (all_minus_one
, reg_in_table
, max_reg
* sizeof (int));
657 bcopy (consec_ints
, reg_qty
, max_reg
* sizeof (int));
658 CLEAR_HARD_REG_SET (hard_regs_in_table
);
660 /* The per-quantity values used to be initialized here, but it is
661 much faster to initialize each as it is made in `make_new_qty'. */
663 for (i
= 0; i
< NBUCKETS
; i
++)
665 register struct table_elt
*this, *next
;
666 for (this = table
[i
]; this; this = next
)
668 next
= this->next_same_hash
;
673 bzero (table
, sizeof table
);
682 /* Say that register REG contains a quantity not in any register before
683 and initialize that quantity. */
691 if (next_qty
>= max_qty
)
694 q
= reg_qty
[reg
] = next_qty
++;
695 qty_first_reg
[q
] = reg
;
696 qty_last_reg
[q
] = reg
;
697 qty_const
[q
] = qty_const_insn
[q
] = 0;
698 qty_comparison_code
[q
] = UNKNOWN
;
700 reg_next_eqv
[reg
] = reg_prev_eqv
[reg
] = -1;
703 /* Make reg NEW equivalent to reg OLD.
704 OLD is not changing; NEW is. */
707 make_regs_eqv (new, old
)
708 register int new, old
;
710 register int lastr
, firstr
;
711 register int q
= reg_qty
[old
];
713 /* Nothing should become eqv until it has a "non-invalid" qty number. */
714 if (! REGNO_QTY_VALID_P (old
))
718 firstr
= qty_first_reg
[q
];
719 lastr
= qty_last_reg
[q
];
721 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
722 hard regs. Among pseudos, if NEW will live longer than any other reg
723 of the same qty, and that is beyond the current basic block,
724 make it the new canonical replacement for this qty. */
725 if (! (firstr
< FIRST_PSEUDO_REGISTER
&& FIXED_REGNO_P (firstr
))
726 /* Certain fixed registers might be of the class NO_REGS. This means
727 that not only can they not be allocated by the compiler, but
728 they cannot be used in substitutions or cannonicallizations
730 && (new >= FIRST_PSEUDO_REGISTER
|| REGNO_REG_CLASS (new) != NO_REGS
)
731 && ((new < FIRST_PSEUDO_REGISTER
&& FIXED_REGNO_P (new))
732 || (new >= FIRST_PSEUDO_REGISTER
733 && (firstr
< FIRST_PSEUDO_REGISTER
734 || ((uid_cuid
[regno_last_uid
[new]] > cse_basic_block_end
735 || (uid_cuid
[regno_first_uid
[new]]
736 < cse_basic_block_start
))
737 && (uid_cuid
[regno_last_uid
[new]]
738 > uid_cuid
[regno_last_uid
[firstr
]]))))))
740 reg_prev_eqv
[firstr
] = new;
741 reg_next_eqv
[new] = firstr
;
742 reg_prev_eqv
[new] = -1;
743 qty_first_reg
[q
] = new;
747 /* If NEW is a hard reg (known to be non-fixed), insert at end.
748 Otherwise, insert before any non-fixed hard regs that are at the
749 end. Registers of class NO_REGS cannot be used as an
750 equivalent for anything. */
751 while (lastr
< FIRST_PSEUDO_REGISTER
&& reg_prev_eqv
[lastr
] >= 0
752 && (REGNO_REG_CLASS (lastr
) == NO_REGS
|| ! FIXED_REGNO_P (lastr
))
753 && new >= FIRST_PSEUDO_REGISTER
)
754 lastr
= reg_prev_eqv
[lastr
];
755 reg_next_eqv
[new] = reg_next_eqv
[lastr
];
756 if (reg_next_eqv
[lastr
] >= 0)
757 reg_prev_eqv
[reg_next_eqv
[lastr
]] = new;
759 qty_last_reg
[q
] = new;
760 reg_next_eqv
[lastr
] = new;
761 reg_prev_eqv
[new] = lastr
;
765 /* Remove REG from its equivalence class. */
768 delete_reg_equiv (reg
)
771 register int n
= reg_next_eqv
[reg
];
772 register int p
= reg_prev_eqv
[reg
];
773 register int q
= reg_qty
[reg
];
775 /* If invalid, do nothing. N and P above are undefined in that case. */
786 qty_first_reg
[q
] = n
;
791 /* Remove any invalid expressions from the hash table
792 that refer to any of the registers contained in expression X.
794 Make sure that newly inserted references to those registers
795 as subexpressions will be considered valid.
797 mention_regs is not called when a register itself
798 is being stored in the table.
800 Return 1 if we have done something that may have changed the hash code
807 register enum rtx_code code
;
810 register int changed
= 0;
818 register int regno
= REGNO (x
);
819 register int endregno
820 = regno
+ (regno
>= FIRST_PSEUDO_REGISTER
? 1
821 : HARD_REGNO_NREGS (regno
, GET_MODE (x
)));
824 for (i
= regno
; i
< endregno
; i
++)
826 if (reg_in_table
[i
] >= 0 && reg_in_table
[i
] != reg_tick
[i
])
827 remove_invalid_refs (i
);
829 reg_in_table
[i
] = reg_tick
[i
];
835 /* If X is a comparison or a COMPARE and either operand is a register
836 that does not have a quantity, give it one. This is so that a later
837 call to record_jump_equiv won't cause X to be assigned a different
838 hash code and not found in the table after that call.
840 It is not necessary to do this here, since rehash_using_reg can
841 fix up the table later, but doing this here eliminates the need to
842 call that expensive function in the most common case where the only
843 use of the register is in the comparison. */
845 if (code
== COMPARE
|| GET_RTX_CLASS (code
) == '<')
847 if (GET_CODE (XEXP (x
, 0)) == REG
848 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x
, 0))))
849 if (insert_regs (XEXP (x
, 0), 0, 0))
851 rehash_using_reg (XEXP (x
, 0));
855 if (GET_CODE (XEXP (x
, 1)) == REG
856 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x
, 1))))
857 if (insert_regs (XEXP (x
, 1), 0, 0))
859 rehash_using_reg (XEXP (x
, 1));
864 fmt
= GET_RTX_FORMAT (code
);
865 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
867 changed
|= mention_regs (XEXP (x
, i
));
868 else if (fmt
[i
] == 'E')
869 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
870 changed
|= mention_regs (XVECEXP (x
, i
, j
));
875 /* Update the register quantities for inserting X into the hash table
876 with a value equivalent to CLASSP.
877 (If the class does not contain a REG, it is irrelevant.)
878 If MODIFIED is nonzero, X is a destination; it is being modified.
879 Note that delete_reg_equiv should be called on a register
880 before insert_regs is done on that register with MODIFIED != 0.
882 Nonzero value means that elements of reg_qty have changed
883 so X's hash code may be different. */
886 insert_regs (x
, classp
, modified
)
888 struct table_elt
*classp
;
891 if (GET_CODE (x
) == REG
)
893 register int regno
= REGNO (x
);
896 || ! (REGNO_QTY_VALID_P (regno
)
897 && qty_mode
[reg_qty
[regno
]] == GET_MODE (x
)))
900 for (classp
= classp
->first_same_value
;
902 classp
= classp
->next_same_value
)
903 if (GET_CODE (classp
->exp
) == REG
904 && GET_MODE (classp
->exp
) == GET_MODE (x
))
906 make_regs_eqv (regno
, REGNO (classp
->exp
));
910 make_new_qty (regno
);
911 qty_mode
[reg_qty
[regno
]] = GET_MODE (x
);
916 return mention_regs (x
);
919 /* Look in or update the hash table. */
921 /* Put the element ELT on the list of free elements. */
925 struct table_elt
*elt
;
927 elt
->next_same_hash
= free_element_chain
;
928 free_element_chain
= elt
;
931 /* Return an element that is free for use. */
933 static struct table_elt
*
936 struct table_elt
*elt
= free_element_chain
;
939 free_element_chain
= elt
->next_same_hash
;
943 return (struct table_elt
*) oballoc (sizeof (struct table_elt
));
946 /* Remove table element ELT from use in the table.
947 HASH is its hash code, made using the HASH macro.
948 It's an argument because often that is known in advance
949 and we save much time not recomputing it. */
952 remove_from_table (elt
, hash
)
953 register struct table_elt
*elt
;
959 /* Mark this element as removed. See cse_insn. */
960 elt
->first_same_value
= 0;
962 /* Remove the table element from its equivalence class. */
965 register struct table_elt
*prev
= elt
->prev_same_value
;
966 register struct table_elt
*next
= elt
->next_same_value
;
968 if (next
) next
->prev_same_value
= prev
;
971 prev
->next_same_value
= next
;
974 register struct table_elt
*newfirst
= next
;
977 next
->first_same_value
= newfirst
;
978 next
= next
->next_same_value
;
983 /* Remove the table element from its hash bucket. */
986 register struct table_elt
*prev
= elt
->prev_same_hash
;
987 register struct table_elt
*next
= elt
->next_same_hash
;
989 if (next
) next
->prev_same_hash
= prev
;
992 prev
->next_same_hash
= next
;
993 else if (table
[hash
] == elt
)
997 /* This entry is not in the proper hash bucket. This can happen
998 when two classes were merged by `merge_equiv_classes'. Search
999 for the hash bucket that it heads. This happens only very
1000 rarely, so the cost is acceptable. */
1001 for (hash
= 0; hash
< NBUCKETS
; hash
++)
1002 if (table
[hash
] == elt
)
1007 /* Remove the table element from its related-value circular chain. */
1009 if (elt
->related_value
!= 0 && elt
->related_value
!= elt
)
1011 register struct table_elt
*p
= elt
->related_value
;
1012 while (p
->related_value
!= elt
)
1013 p
= p
->related_value
;
1014 p
->related_value
= elt
->related_value
;
1015 if (p
->related_value
== p
)
1016 p
->related_value
= 0;
1022 /* Look up X in the hash table and return its table element,
1023 or 0 if X is not in the table.
1025 MODE is the machine-mode of X, or if X is an integer constant
1026 with VOIDmode then MODE is the mode with which X will be used.
1028 Here we are satisfied to find an expression whose tree structure
1031 static struct table_elt
*
1032 lookup (x
, hash
, mode
)
1035 enum machine_mode mode
;
1037 register struct table_elt
*p
;
1039 for (p
= table
[hash
]; p
; p
= p
->next_same_hash
)
1040 if (mode
== p
->mode
&& ((x
== p
->exp
&& GET_CODE (x
) == REG
)
1041 || exp_equiv_p (x
, p
->exp
, GET_CODE (x
) != REG
, 0)))
1047 /* Like `lookup' but don't care whether the table element uses invalid regs.
1048 Also ignore discrepancies in the machine mode of a register. */
1050 static struct table_elt
*
1051 lookup_for_remove (x
, hash
, mode
)
1054 enum machine_mode mode
;
1056 register struct table_elt
*p
;
1058 if (GET_CODE (x
) == REG
)
1060 int regno
= REGNO (x
);
1061 /* Don't check the machine mode when comparing registers;
1062 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1063 for (p
= table
[hash
]; p
; p
= p
->next_same_hash
)
1064 if (GET_CODE (p
->exp
) == REG
1065 && REGNO (p
->exp
) == regno
)
1070 for (p
= table
[hash
]; p
; p
= p
->next_same_hash
)
1071 if (mode
== p
->mode
&& (x
== p
->exp
|| exp_equiv_p (x
, p
->exp
, 0, 0)))
1078 /* Look for an expression equivalent to X and with code CODE.
1079 If one is found, return that expression. */
1082 lookup_as_function (x
, code
)
1086 register struct table_elt
*p
= lookup (x
, safe_hash (x
, VOIDmode
) % NBUCKETS
,
1091 for (p
= p
->first_same_value
; p
; p
= p
->next_same_value
)
1093 if (GET_CODE (p
->exp
) == code
1094 /* Make sure this is a valid entry in the table. */
1095 && exp_equiv_p (p
->exp
, p
->exp
, 1, 0))
1102 /* Insert X in the hash table, assuming HASH is its hash code
1103 and CLASSP is an element of the class it should go in
1104 (or 0 if a new class should be made).
1105 It is inserted at the proper position to keep the class in
1106 the order cheapest first.
1108 MODE is the machine-mode of X, or if X is an integer constant
1109 with VOIDmode then MODE is the mode with which X will be used.
1111 For elements of equal cheapness, the most recent one
1112 goes in front, except that the first element in the list
1113 remains first unless a cheaper element is added. The order of
1114 pseudo-registers does not matter, as canon_reg will be called to
1115 find the cheapest when a register is retrieved from the table.
1117 The in_memory field in the hash table element is set to 0.
1118 The caller must set it nonzero if appropriate.
1120 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1121 and if insert_regs returns a nonzero value
1122 you must then recompute its hash code before calling here.
1124 If necessary, update table showing constant values of quantities. */
1126 #define CHEAPER(X,Y) ((X)->cost < (Y)->cost)
1128 static struct table_elt
*
1129 insert (x
, classp
, hash
, mode
)
1131 register struct table_elt
*classp
;
1133 enum machine_mode mode
;
1135 register struct table_elt
*elt
;
1137 /* If X is a register and we haven't made a quantity for it,
1138 something is wrong. */
1139 if (GET_CODE (x
) == REG
&& ! REGNO_QTY_VALID_P (REGNO (x
)))
1142 /* If X is a hard register, show it is being put in the table. */
1143 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
1145 int regno
= REGNO (x
);
1146 int endregno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
));
1149 for (i
= regno
; i
< endregno
; i
++)
1150 SET_HARD_REG_BIT (hard_regs_in_table
, i
);
1154 /* Put an element for X into the right hash bucket. */
1156 elt
= get_element ();
1158 elt
->cost
= COST (x
);
1159 elt
->next_same_value
= 0;
1160 elt
->prev_same_value
= 0;
1161 elt
->next_same_hash
= table
[hash
];
1162 elt
->prev_same_hash
= 0;
1163 elt
->related_value
= 0;
1166 elt
->is_const
= (CONSTANT_P (x
)
1167 /* GNU C++ takes advantage of this for `this'
1168 (and other const values). */
1169 || (RTX_UNCHANGING_P (x
)
1170 && GET_CODE (x
) == REG
1171 && REGNO (x
) >= FIRST_PSEUDO_REGISTER
)
1172 || FIXED_BASE_PLUS_P (x
));
1175 table
[hash
]->prev_same_hash
= elt
;
1178 /* Put it into the proper value-class. */
1181 classp
= classp
->first_same_value
;
1182 if (CHEAPER (elt
, classp
))
1183 /* Insert at the head of the class */
1185 register struct table_elt
*p
;
1186 elt
->next_same_value
= classp
;
1187 classp
->prev_same_value
= elt
;
1188 elt
->first_same_value
= elt
;
1190 for (p
= classp
; p
; p
= p
->next_same_value
)
1191 p
->first_same_value
= elt
;
1195 /* Insert not at head of the class. */
1196 /* Put it after the last element cheaper than X. */
1197 register struct table_elt
*p
, *next
;
1198 for (p
= classp
; (next
= p
->next_same_value
) && CHEAPER (next
, elt
);
1200 /* Put it after P and before NEXT. */
1201 elt
->next_same_value
= next
;
1203 next
->prev_same_value
= elt
;
1204 elt
->prev_same_value
= p
;
1205 p
->next_same_value
= elt
;
1206 elt
->first_same_value
= classp
;
1210 elt
->first_same_value
= elt
;
1212 /* If this is a constant being set equivalent to a register or a register
1213 being set equivalent to a constant, note the constant equivalence.
1215 If this is a constant, it cannot be equivalent to a different constant,
1216 and a constant is the only thing that can be cheaper than a register. So
1217 we know the register is the head of the class (before the constant was
1220 If this is a register that is not already known equivalent to a
1221 constant, we must check the entire class.
1223 If this is a register that is already known equivalent to an insn,
1224 update `qty_const_insn' to show that `this_insn' is the latest
1225 insn making that quantity equivalent to the constant. */
1227 if (elt
->is_const
&& classp
&& GET_CODE (classp
->exp
) == REG
)
1229 qty_const
[reg_qty
[REGNO (classp
->exp
)]]
1230 = gen_lowpart_if_possible (qty_mode
[reg_qty
[REGNO (classp
->exp
)]], x
);
1231 qty_const_insn
[reg_qty
[REGNO (classp
->exp
)]] = this_insn
;
1234 else if (GET_CODE (x
) == REG
&& classp
&& ! qty_const
[reg_qty
[REGNO (x
)]])
1236 register struct table_elt
*p
;
1238 for (p
= classp
; p
!= 0; p
= p
->next_same_value
)
1242 qty_const
[reg_qty
[REGNO (x
)]]
1243 = gen_lowpart_if_possible (GET_MODE (x
), p
->exp
);
1244 qty_const_insn
[reg_qty
[REGNO (x
)]] = this_insn
;
1250 else if (GET_CODE (x
) == REG
&& qty_const
[reg_qty
[REGNO (x
)]]
1251 && GET_MODE (x
) == qty_mode
[reg_qty
[REGNO (x
)]])
1252 qty_const_insn
[reg_qty
[REGNO (x
)]] = this_insn
;
1254 /* If this is a constant with symbolic value,
1255 and it has a term with an explicit integer value,
1256 link it up with related expressions. */
1257 if (GET_CODE (x
) == CONST
)
1259 rtx subexp
= get_related_value (x
);
1261 struct table_elt
*subelt
, *subelt_prev
;
1265 /* Get the integer-free subexpression in the hash table. */
1266 subhash
= safe_hash (subexp
, mode
) % NBUCKETS
;
1267 subelt
= lookup (subexp
, subhash
, mode
);
1269 subelt
= insert (subexp
, 0, subhash
, mode
);
1270 /* Initialize SUBELT's circular chain if it has none. */
1271 if (subelt
->related_value
== 0)
1272 subelt
->related_value
= subelt
;
1273 /* Find the element in the circular chain that precedes SUBELT. */
1274 subelt_prev
= subelt
;
1275 while (subelt_prev
->related_value
!= subelt
)
1276 subelt_prev
= subelt_prev
->related_value
;
1277 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1278 This way the element that follows SUBELT is the oldest one. */
1279 elt
->related_value
= subelt_prev
->related_value
;
1280 subelt_prev
->related_value
= elt
;
1287 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1288 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1289 the two classes equivalent.
1291 CLASS1 will be the surviving class; CLASS2 should not be used after this
1294 Any invalid entries in CLASS2 will not be copied. */
1297 merge_equiv_classes (class1
, class2
)
1298 struct table_elt
*class1
, *class2
;
1300 struct table_elt
*elt
, *next
, *new;
1302 /* Ensure we start with the head of the classes. */
1303 class1
= class1
->first_same_value
;
1304 class2
= class2
->first_same_value
;
1306 /* If they were already equal, forget it. */
1307 if (class1
== class2
)
1310 for (elt
= class2
; elt
; elt
= next
)
1314 enum machine_mode mode
= elt
->mode
;
1316 next
= elt
->next_same_value
;
1318 /* Remove old entry, make a new one in CLASS1's class.
1319 Don't do this for invalid entries as we cannot find their
1320 hash code (it also isn't necessary). */
1321 if (GET_CODE (exp
) == REG
|| exp_equiv_p (exp
, exp
, 1, 0))
1323 hash_arg_in_memory
= 0;
1324 hash_arg_in_struct
= 0;
1325 hash
= HASH (exp
, mode
);
1327 if (GET_CODE (exp
) == REG
)
1328 delete_reg_equiv (REGNO (exp
));
1330 remove_from_table (elt
, hash
);
1332 if (insert_regs (exp
, class1
, 0))
1333 hash
= HASH (exp
, mode
);
1334 new = insert (exp
, class1
, hash
, mode
);
1335 new->in_memory
= hash_arg_in_memory
;
1336 new->in_struct
= hash_arg_in_struct
;
1341 /* Remove from the hash table, or mark as invalid,
1342 all expressions whose values could be altered by storing in X.
1343 X is a register, a subreg, or a memory reference with nonvarying address
1344 (because, when a memory reference with a varying address is stored in,
1345 all memory references are removed by invalidate_memory
1346 so specific invalidation is superfluous).
1348 A nonvarying address may be just a register or just
1349 a symbol reference, or it may be either of those plus
1350 a numeric offset. */
1357 register struct table_elt
*p
;
1359 register int start
, end
;
1361 /* If X is a register, dependencies on its contents
1362 are recorded through the qty number mechanism.
1363 Just change the qty number of the register,
1364 mark it as invalid for expressions that refer to it,
1365 and remove it itself. */
1367 if (GET_CODE (x
) == REG
)
1369 register int regno
= REGNO (x
);
1370 register int hash
= HASH (x
, GET_MODE (x
));
1372 /* Remove REGNO from any quantity list it might be on and indicate
1373 that its value might have changed. If it is a pseudo, remove its
1374 entry from the hash table.
1376 For a hard register, we do the first two actions above for any
1377 additional hard registers corresponding to X. Then, if any of these
1378 registers are in the table, we must remove any REG entries that
1379 overlap these registers. */
1381 delete_reg_equiv (regno
);
1384 if (regno
>= FIRST_PSEUDO_REGISTER
)
1385 remove_from_table (lookup_for_remove (x
, hash
, GET_MODE (x
)), hash
);
1388 int in_table
= TEST_HARD_REG_BIT (hard_regs_in_table
, regno
);
1389 int endregno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
));
1390 int tregno
, tendregno
;
1391 register struct table_elt
*p
, *next
;
1393 CLEAR_HARD_REG_BIT (hard_regs_in_table
, regno
);
1395 for (i
= regno
+ 1; i
< endregno
; i
++)
1397 in_table
|= TEST_HARD_REG_BIT (hard_regs_in_table
, i
);
1398 CLEAR_HARD_REG_BIT (hard_regs_in_table
, i
);
1399 delete_reg_equiv (i
);
1404 for (hash
= 0; hash
< NBUCKETS
; hash
++)
1405 for (p
= table
[hash
]; p
; p
= next
)
1407 next
= p
->next_same_hash
;
1409 if (GET_CODE (p
->exp
) != REG
1410 || REGNO (p
->exp
) >= FIRST_PSEUDO_REGISTER
)
1413 tregno
= REGNO (p
->exp
);
1415 = tregno
+ HARD_REGNO_NREGS (tregno
, GET_MODE (p
->exp
));
1416 if (tendregno
> regno
&& tregno
< endregno
)
1417 remove_from_table (p
, hash
);
1424 if (GET_CODE (x
) == SUBREG
)
1426 if (GET_CODE (SUBREG_REG (x
)) != REG
)
1428 invalidate (SUBREG_REG (x
));
1432 /* X is not a register; it must be a memory reference with
1433 a nonvarying address. Remove all hash table elements
1434 that refer to overlapping pieces of memory. */
1436 if (GET_CODE (x
) != MEM
)
1441 /* Registers with nonvarying addresses usually have constant equivalents;
1442 but the frame pointer register is also possible. */
1443 if (GET_CODE (base
) == REG
1444 && REGNO_QTY_VALID_P (REGNO (base
))
1445 && qty_mode
[reg_qty
[REGNO (base
)]] == GET_MODE (base
)
1446 && qty_const
[reg_qty
[REGNO (base
)]] != 0)
1447 base
= qty_const
[reg_qty
[REGNO (base
)]];
1448 else if (GET_CODE (base
) == PLUS
1449 && GET_CODE (XEXP (base
, 1)) == CONST_INT
1450 && GET_CODE (XEXP (base
, 0)) == REG
1451 && REGNO_QTY_VALID_P (REGNO (XEXP (base
, 0)))
1452 && (qty_mode
[reg_qty
[REGNO (XEXP (base
, 0))]]
1453 == GET_MODE (XEXP (base
, 0)))
1454 && qty_const
[reg_qty
[REGNO (XEXP (base
, 0))]])
1456 start
= INTVAL (XEXP (base
, 1));
1457 base
= qty_const
[reg_qty
[REGNO (XEXP (base
, 0))]];
1460 if (GET_CODE (base
) == CONST
)
1461 base
= XEXP (base
, 0);
1462 if (GET_CODE (base
) == PLUS
1463 && GET_CODE (XEXP (base
, 1)) == CONST_INT
)
1465 start
+= INTVAL (XEXP (base
, 1));
1466 base
= XEXP (base
, 0);
1469 end
= start
+ GET_MODE_SIZE (GET_MODE (x
));
1470 for (i
= 0; i
< NBUCKETS
; i
++)
1472 register struct table_elt
*next
;
1473 for (p
= table
[i
]; p
; p
= next
)
1475 next
= p
->next_same_hash
;
1476 if (refers_to_mem_p (p
->exp
, base
, start
, end
))
1477 remove_from_table (p
, i
);
1482 /* Remove all expressions that refer to register REGNO,
1483 since they are already invalid, and we are about to
1484 mark that register valid again and don't want the old
1485 expressions to reappear as valid. */
1488 remove_invalid_refs (regno
)
1492 register struct table_elt
*p
, *next
;
1494 for (i
= 0; i
< NBUCKETS
; i
++)
1495 for (p
= table
[i
]; p
; p
= next
)
1497 next
= p
->next_same_hash
;
1498 if (GET_CODE (p
->exp
) != REG
1499 && refers_to_regno_p (regno
, regno
+ 1, p
->exp
, 0))
1500 remove_from_table (p
, i
);
1504 /* Recompute the hash codes of any valid entries in the hash table that
1505 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1507 This is called when we make a jump equivalence. */
1510 rehash_using_reg (x
)
1514 struct table_elt
*p
, *next
;
1517 if (GET_CODE (x
) == SUBREG
)
1520 /* If X is not a register or if the register is known not to be in any
1521 valid entries in the table, we have no work to do. */
1523 if (GET_CODE (x
) != REG
1524 || reg_in_table
[REGNO (x
)] < 0
1525 || reg_in_table
[REGNO (x
)] != reg_tick
[REGNO (x
)])
1528 /* Scan all hash chains looking for valid entries that mention X.
1529 If we find one and it is in the wrong hash chain, move it. We can skip
1530 objects that are registers, since they are handled specially. */
1532 for (i
= 0; i
< NBUCKETS
; i
++)
1533 for (p
= table
[i
]; p
; p
= next
)
1535 next
= p
->next_same_hash
;
1536 if (GET_CODE (p
->exp
) != REG
&& reg_mentioned_p (x
, p
->exp
)
1537 && exp_equiv_p (p
->exp
, p
->exp
, 1, 0)
1538 && i
!= (hash
= safe_hash (p
->exp
, p
->mode
) % NBUCKETS
))
1540 if (p
->next_same_hash
)
1541 p
->next_same_hash
->prev_same_hash
= p
->prev_same_hash
;
1543 if (p
->prev_same_hash
)
1544 p
->prev_same_hash
->next_same_hash
= p
->next_same_hash
;
1546 table
[i
] = p
->next_same_hash
;
1548 p
->next_same_hash
= table
[hash
];
1549 p
->prev_same_hash
= 0;
1551 table
[hash
]->prev_same_hash
= p
;
1557 /* Remove from the hash table all expressions that reference memory,
1558 or some of them as specified by *WRITES. */
1561 invalidate_memory (writes
)
1562 struct write_data
*writes
;
1565 register struct table_elt
*p
, *next
;
1566 int all
= writes
->all
;
1567 int nonscalar
= writes
->nonscalar
;
1569 for (i
= 0; i
< NBUCKETS
; i
++)
1570 for (p
= table
[i
]; p
; p
= next
)
1572 next
= p
->next_same_hash
;
1575 || (nonscalar
&& p
->in_struct
)
1576 || cse_rtx_addr_varies_p (p
->exp
)))
1577 remove_from_table (p
, i
);
1581 /* Remove from the hash table any expression that is a call-clobbered
1582 register. Also update their TICK values. */
1585 invalidate_for_call ()
1587 int regno
, endregno
;
1590 struct table_elt
*p
, *next
;
1593 /* Go through all the hard registers. For each that is clobbered in
1594 a CALL_INSN, remove the register from quantity chains and update
1595 reg_tick if defined. Also see if any of these registers is currently
1598 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1599 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, regno
))
1601 delete_reg_equiv (regno
);
1602 if (reg_tick
[regno
] >= 0)
1605 in_table
|= TEST_HARD_REG_BIT (hard_regs_in_table
, regno
);
1608 /* In the case where we have no call-clobbered hard registers in the
1609 table, we are done. Otherwise, scan the table and remove any
1610 entry that overlaps a call-clobbered register. */
1613 for (hash
= 0; hash
< NBUCKETS
; hash
++)
1614 for (p
= table
[hash
]; p
; p
= next
)
1616 next
= p
->next_same_hash
;
1618 if (GET_CODE (p
->exp
) != REG
1619 || REGNO (p
->exp
) >= FIRST_PSEUDO_REGISTER
)
1622 regno
= REGNO (p
->exp
);
1623 endregno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (p
->exp
));
1625 for (i
= regno
; i
< endregno
; i
++)
1626 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, i
))
1628 remove_from_table (p
, hash
);
1634 /* Given an expression X of type CONST,
1635 and ELT which is its table entry (or 0 if it
1636 is not in the hash table),
1637 return an alternate expression for X as a register plus integer.
1638 If none can be found, return 0. */
1641 use_related_value (x
, elt
)
1643 struct table_elt
*elt
;
1645 register struct table_elt
*relt
= 0;
1646 register struct table_elt
*p
, *q
;
1649 /* First, is there anything related known?
1650 If we have a table element, we can tell from that.
1651 Otherwise, must look it up. */
1653 if (elt
!= 0 && elt
->related_value
!= 0)
1655 else if (elt
== 0 && GET_CODE (x
) == CONST
)
1657 rtx subexp
= get_related_value (x
);
1659 relt
= lookup (subexp
,
1660 safe_hash (subexp
, GET_MODE (subexp
)) % NBUCKETS
,
1667 /* Search all related table entries for one that has an
1668 equivalent register. */
1673 /* This loop is strange in that it is executed in two different cases.
1674 The first is when X is already in the table. Then it is searching
1675 the RELATED_VALUE list of X's class (RELT). The second case is when
1676 X is not in the table. Then RELT points to a class for the related
1679 Ensure that, whatever case we are in, that we ignore classes that have
1680 the same value as X. */
1682 if (rtx_equal_p (x
, p
->exp
))
1685 for (q
= p
->first_same_value
; q
; q
= q
->next_same_value
)
1686 if (GET_CODE (q
->exp
) == REG
)
1692 p
= p
->related_value
;
1694 /* We went all the way around, so there is nothing to be found.
1695 Alternatively, perhaps RELT was in the table for some other reason
1696 and it has no related values recorded. */
1697 if (p
== relt
|| p
== 0)
1704 offset
= (get_integer_term (x
) - get_integer_term (p
->exp
));
1705 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
1706 return plus_constant (q
->exp
, offset
);
1709 /* Hash an rtx. We are careful to make sure the value is never negative.
1710 Equivalent registers hash identically.
1711 MODE is used in hashing for CONST_INTs only;
1712 otherwise the mode of X is used.
1714 Store 1 in do_not_record if any subexpression is volatile.
1716 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1717 which does not have the RTX_UNCHANGING_P bit set.
1718 In this case, also store 1 in hash_arg_in_struct
1719 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1721 Note that cse_insn knows that the hash code of a MEM expression
1722 is just (int) MEM plus the hash code of the address. */
1725 canon_hash (x
, mode
)
1727 enum machine_mode mode
;
1730 register int hash
= 0;
1731 register enum rtx_code code
;
1734 /* repeat is used to turn tail-recursion into iteration. */
1739 code
= GET_CODE (x
);
1744 register int regno
= REGNO (x
);
1746 /* On some machines, we can't record any non-fixed hard register,
1747 because extending its life will cause reload problems. We
1748 consider ap, fp, and sp to be fixed for this purpose.
1749 On all machines, we can't record any global registers. */
1751 if (regno
< FIRST_PSEUDO_REGISTER
1752 && (global_regs
[regno
]
1753 #ifdef SMALL_REGISTER_CLASSES
1754 || (! fixed_regs
[regno
]
1755 && regno
!= FRAME_POINTER_REGNUM
1756 && regno
!= ARG_POINTER_REGNUM
1757 && regno
!= STACK_POINTER_REGNUM
)
1764 return hash
+ ((int) REG
<< 7) + reg_qty
[regno
];
1768 hash
+= ((int) mode
+ ((int) CONST_INT
<< 7)
1769 + INTVAL (x
) + (INTVAL (x
) >> HASHBITS
));
1770 return ((1 << HASHBITS
) - 1) & hash
;
1773 /* This is like the general case, except that it only counts
1774 the integers representing the constant. */
1775 hash
+= (int) code
+ (int) GET_MODE (x
);
1778 for (i
= 2; i
< GET_RTX_LENGTH (CONST_DOUBLE
); i
++)
1780 int tem
= XINT (x
, i
);
1781 hash
+= ((1 << HASHBITS
) - 1) & (tem
+ (tem
>> HASHBITS
));
1786 /* Assume there is only one rtx object for any given label. */
1788 /* Use `and' to ensure a positive number. */
1789 return (hash
+ ((int) LABEL_REF
<< 7)
1790 + ((int) XEXP (x
, 0) & ((1 << HASHBITS
) - 1)));
1793 return (hash
+ ((int) SYMBOL_REF
<< 7)
1794 + ((int) XEXP (x
, 0) & ((1 << HASHBITS
) - 1)));
1797 if (MEM_VOLATILE_P (x
))
1802 if (! RTX_UNCHANGING_P (x
))
1804 hash_arg_in_memory
= 1;
1805 if (MEM_IN_STRUCT_P (x
)) hash_arg_in_struct
= 1;
1807 /* Now that we have already found this special case,
1808 might as well speed it up as much as possible. */
1820 case UNSPEC_VOLATILE
:
1825 if (MEM_VOLATILE_P (x
))
1832 i
= GET_RTX_LENGTH (code
) - 1;
1833 hash
+= (int) code
+ (int) GET_MODE (x
);
1834 fmt
= GET_RTX_FORMAT (code
);
1839 rtx tem
= XEXP (x
, i
);
1842 /* If the operand is a REG that is equivalent to a constant, hash
1843 as if we were hashing the constant, since we will be comparing
1845 if (tem
!= 0 && GET_CODE (tem
) == REG
1846 && REGNO_QTY_VALID_P (REGNO (tem
))
1847 && qty_mode
[reg_qty
[REGNO (tem
)]] == GET_MODE (tem
)
1848 && (tem1
= qty_const
[reg_qty
[REGNO (tem
)]]) != 0
1849 && CONSTANT_P (tem1
))
1852 /* If we are about to do the last recursive call
1853 needed at this level, change it into iteration.
1854 This function is called enough to be worth it. */
1860 hash
+= canon_hash (tem
, 0);
1862 else if (fmt
[i
] == 'E')
1863 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1864 hash
+= canon_hash (XVECEXP (x
, i
, j
), 0);
1865 else if (fmt
[i
] == 's')
1867 register char *p
= XSTR (x
, i
);
1871 register int tem
= *p
++;
1872 hash
+= ((1 << HASHBITS
) - 1) & (tem
+ (tem
>> HASHBITS
));
1875 else if (fmt
[i
] == 'i')
1877 register int tem
= XINT (x
, i
);
1878 hash
+= ((1 << HASHBITS
) - 1) & (tem
+ (tem
>> HASHBITS
));
1886 /* Like canon_hash but with no side effects. */
1891 enum machine_mode mode
;
1893 int save_do_not_record
= do_not_record
;
1894 int save_hash_arg_in_memory
= hash_arg_in_memory
;
1895 int save_hash_arg_in_struct
= hash_arg_in_struct
;
1896 int hash
= canon_hash (x
, mode
);
1897 hash_arg_in_memory
= save_hash_arg_in_memory
;
1898 hash_arg_in_struct
= save_hash_arg_in_struct
;
1899 do_not_record
= save_do_not_record
;
1903 /* Return 1 iff X and Y would canonicalize into the same thing,
1904 without actually constructing the canonicalization of either one.
1905 If VALIDATE is nonzero,
1906 we assume X is an expression being processed from the rtl
1907 and Y was found in the hash table. We check register refs
1908 in Y for being marked as valid.
1910 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
1911 that is known to be in the register. Ordinarily, we don't allow them
1912 to match, because letting them match would cause unpredictable results
1913 in all the places that search a hash table chain for an equivalent
1914 for a given value. A possible equivalent that has different structure
1915 has its hash code computed from different data. Whether the hash code
1916 is the same as that of the given value is pure luck. */
1919 exp_equiv_p (x
, y
, validate
, equal_values
)
1925 register enum rtx_code code
;
1928 /* Note: it is incorrect to assume an expression is equivalent to itself
1929 if VALIDATE is nonzero. */
1930 if (x
== y
&& !validate
)
1932 if (x
== 0 || y
== 0)
1935 code
= GET_CODE (x
);
1936 if (code
!= GET_CODE (y
))
1941 /* If X is a constant and Y is a register or vice versa, they may be
1942 equivalent. We only have to validate if Y is a register. */
1943 if (CONSTANT_P (x
) && GET_CODE (y
) == REG
1944 && REGNO_QTY_VALID_P (REGNO (y
))
1945 && GET_MODE (y
) == qty_mode
[reg_qty
[REGNO (y
)]]
1946 && rtx_equal_p (x
, qty_const
[reg_qty
[REGNO (y
)]])
1947 && (! validate
|| reg_in_table
[REGNO (y
)] == reg_tick
[REGNO (y
)]))
1950 if (CONSTANT_P (y
) && code
== REG
1951 && REGNO_QTY_VALID_P (REGNO (x
))
1952 && GET_MODE (x
) == qty_mode
[reg_qty
[REGNO (x
)]]
1953 && rtx_equal_p (y
, qty_const
[reg_qty
[REGNO (x
)]]))
1959 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1960 if (GET_MODE (x
) != GET_MODE (y
))
1970 return XINT (x
, 0) == XINT (y
, 0);
1974 return XEXP (x
, 0) == XEXP (y
, 0);
1978 int regno
= REGNO (y
);
1980 = regno
+ (regno
>= FIRST_PSEUDO_REGISTER
? 1
1981 : HARD_REGNO_NREGS (regno
, GET_MODE (y
)));
1984 /* If the quantities are not the same, the expressions are not
1985 equivalent. If there are and we are not to validate, they
1986 are equivalent. Otherwise, ensure all regs are up-to-date. */
1988 if (reg_qty
[REGNO (x
)] != reg_qty
[regno
])
1994 for (i
= regno
; i
< endregno
; i
++)
1995 if (reg_in_table
[i
] != reg_tick
[i
])
2001 /* For commutative operations, check both orders. */
2009 return ((exp_equiv_p (XEXP (x
, 0), XEXP (y
, 0), validate
, equal_values
)
2010 && exp_equiv_p (XEXP (x
, 1), XEXP (y
, 1),
2011 validate
, equal_values
))
2012 || (exp_equiv_p (XEXP (x
, 0), XEXP (y
, 1),
2013 validate
, equal_values
)
2014 && exp_equiv_p (XEXP (x
, 1), XEXP (y
, 0),
2015 validate
, equal_values
)));
2018 /* Compare the elements. If any pair of corresponding elements
2019 fail to match, return 0 for the whole things. */
2021 fmt
= GET_RTX_FORMAT (code
);
2022 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2026 if (! exp_equiv_p (XEXP (x
, i
), XEXP (y
, i
), validate
, equal_values
))
2029 else if (fmt
[i
] == 'E')
2032 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
2034 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2035 if (! exp_equiv_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
),
2036 validate
, equal_values
))
2039 else if (fmt
[i
] == 's')
2041 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
2044 else if (fmt
[i
] == 'i')
2046 if (XINT (x
, i
) != XINT (y
, i
))
2049 else if (fmt
[i
] != '0')
2055 /* Return 1 iff any subexpression of X matches Y.
2056 Here we do not require that X or Y be valid (for registers referred to)
2057 for being in the hash table. */
2064 register enum rtx_code code
;
2070 if (x
== 0 || y
== 0)
2073 code
= GET_CODE (x
);
2074 /* If X as a whole has the same code as Y, they may match.
2076 if (code
== GET_CODE (y
))
2078 if (exp_equiv_p (x
, y
, 0, 1))
2082 /* X does not match, so try its subexpressions. */
2084 fmt
= GET_RTX_FORMAT (code
);
2085 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2094 if (refers_to_p (XEXP (x
, i
), y
))
2097 else if (fmt
[i
] == 'E')
2100 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2101 if (refers_to_p (XVECEXP (x
, i
, j
), y
))
2108 /* Return 1 iff any subexpression of X refers to memory
2109 at an address of BASE plus some offset
2110 such that any of the bytes' offsets fall between START (inclusive)
2111 and END (exclusive).
2113 The value is undefined if X is a varying address.
2114 This function is not used in such cases.
2116 When used in the cse pass, `qty_const' is nonzero, and it is used
2117 to treat an address that is a register with a known constant value
2118 as if it were that constant value.
2119 In the loop pass, `qty_const' is zero, so this is not done. */
2122 refers_to_mem_p (x
, base
, start
, end
)
2127 register enum rtx_code code
;
2130 if (GET_CODE (base
) == CONST_INT
)
2132 start
+= INTVAL (base
);
2133 end
+= INTVAL (base
);
2141 code
= GET_CODE (x
);
2144 register rtx addr
= XEXP (x
, 0); /* Get the address. */
2148 if (GET_CODE (addr
) == REG
2149 /* qty_const is 0 when outside the cse pass;
2150 at such times, this info is not available. */
2152 && REGNO_QTY_VALID_P (REGNO (addr
))
2153 && GET_MODE (addr
) == qty_mode
[reg_qty
[REGNO (addr
)]]
2154 && qty_const
[reg_qty
[REGNO (addr
)]] != 0)
2155 addr
= qty_const
[reg_qty
[REGNO (addr
)]];
2156 else if (GET_CODE (addr
) == PLUS
2157 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
2158 && GET_CODE (XEXP (addr
, 0)) == REG
2160 && REGNO_QTY_VALID_P (REGNO (XEXP (addr
, 0)))
2161 && (GET_MODE (XEXP (addr
, 0))
2162 == qty_mode
[reg_qty
[REGNO (XEXP (addr
, 0))]])
2163 && qty_const
[reg_qty
[REGNO (XEXP (addr
, 0))]])
2165 i
= INTVAL (XEXP (addr
, 1));
2166 addr
= qty_const
[reg_qty
[REGNO (XEXP (addr
, 0))]];
2170 if (GET_CODE (addr
) == CONST
)
2171 addr
= XEXP (addr
, 0);
2173 /* If ADDR is BASE, or BASE plus an integer, put
2174 the integer in I. */
2175 if (GET_CODE (addr
) == PLUS
2176 && XEXP (addr
, 0) == base
2177 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
2178 i
+= INTVAL (XEXP (addr
, 1));
2179 else if (GET_CODE (addr
) == LO_SUM
)
2181 if (GET_CODE (base
) != LO_SUM
)
2183 /* The REG component of the LO_SUM is known by the
2184 const value in the XEXP part. */
2185 addr
= XEXP (addr
, 1);
2186 base
= XEXP (base
, 1);
2188 if (GET_CODE (base
) == CONST
)
2189 base
= XEXP (base
, 0);
2190 if (GET_CODE (base
) == PLUS
2191 && GET_CODE (XEXP (base
, 1)) == CONST_INT
)
2193 int tem
= INTVAL (XEXP (base
, 1));
2196 base
= XEXP (base
, 0);
2200 else if (GET_CODE (base
) == LO_SUM
)
2202 base
= XEXP (base
, 1);
2203 if (GET_CODE (base
) == CONST
)
2204 base
= XEXP (base
, 0);
2205 if (GET_CODE (base
) == PLUS
2206 && GET_CODE (XEXP (base
, 1)) == CONST_INT
)
2208 int tem
= INTVAL (XEXP (base
, 1));
2211 base
= XEXP (base
, 0);
2215 else if (GET_CODE (addr
) == CONST_INT
&& base
== const0_rtx
)
2217 else if (addr
!= base
)
2220 myend
= i
+ GET_MODE_SIZE (GET_MODE (x
));
2221 return myend
> start
&& i
< end
;
2224 /* X does not match, so try its subexpressions. */
2226 fmt
= GET_RTX_FORMAT (code
);
2227 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2236 if (refers_to_mem_p (XEXP (x
, i
), base
, start
, end
))
2239 else if (fmt
[i
] == 'E')
2242 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2243 if (refers_to_mem_p (XVECEXP (x
, i
, j
), base
, start
, end
))
2250 /* Nonzero if X refers to memory at a varying address;
2251 except that a register which has at the moment a known constant value
2252 isn't considered variable. */
2255 cse_rtx_addr_varies_p (x
)
2258 /* We need not check for X and the equivalence class being of the same
2259 mode because if X is equivalent to a constant in some mode, it
2260 doesn't vary in any mode. */
2262 if (GET_CODE (x
) == MEM
2263 && GET_CODE (XEXP (x
, 0)) == REG
2264 && REGNO_QTY_VALID_P (REGNO (XEXP (x
, 0)))
2265 && GET_MODE (XEXP (x
, 0)) == qty_mode
[reg_qty
[REGNO (XEXP (x
, 0))]]
2266 && qty_const
[reg_qty
[REGNO (XEXP (x
, 0))]] != 0)
2269 if (GET_CODE (x
) == MEM
2270 && GET_CODE (XEXP (x
, 0)) == PLUS
2271 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2272 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2273 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x
, 0), 0)))
2274 && (GET_MODE (XEXP (XEXP (x
, 0), 0))
2275 == qty_mode
[reg_qty
[REGNO (XEXP (XEXP (x
, 0), 0))]])
2276 && qty_const
[reg_qty
[REGNO (XEXP (XEXP (x
, 0), 0))]])
2279 return rtx_addr_varies_p (x
);
2282 /* Canonicalize an expression:
2283 replace each register reference inside it
2284 with the "oldest" equivalent register.
2286 If INSN is non-zero and we are replacing a pseudo with a hard register
2287 or vice versa, verify that INSN remains valid after we make our
2296 register enum rtx_code code
;
2302 code
= GET_CODE (x
);
2320 /* Never replace a hard reg, because hard regs can appear
2321 in more than one machine mode, and we must preserve the mode
2322 of each occurrence. Also, some hard regs appear in
2323 MEMs that are shared and mustn't be altered. Don't try to
2324 replace any reg that maps to a reg of class NO_REGS. */
2325 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
2326 || ! REGNO_QTY_VALID_P (REGNO (x
)))
2329 first
= qty_first_reg
[reg_qty
[REGNO (x
)]];
2330 return (first
>= FIRST_PSEUDO_REGISTER
? regno_reg_rtx
[first
]
2331 : REGNO_REG_CLASS (first
) == NO_REGS
? x
2332 : gen_rtx (REG
, qty_mode
[reg_qty
[REGNO (x
)]], first
));
2336 fmt
= GET_RTX_FORMAT (code
);
2337 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2343 rtx
new = canon_reg (XEXP (x
, i
), insn
);
2345 /* If replacing pseudo with hard reg or vice versa, ensure the
2346 insn remains valid. */
2347 if (new && GET_CODE (new) == REG
&& GET_CODE (XEXP (x
, i
)) == REG
2348 && ((REGNO (new) < FIRST_PSEUDO_REGISTER
)
2349 != (REGNO (XEXP (x
, i
)) < FIRST_PSEUDO_REGISTER
)))
2350 validate_change (insn
, &XEXP (x
, i
), new, 0);
2354 else if (fmt
[i
] == 'E')
2355 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2356 XVECEXP (x
, i
, j
) = canon_reg (XVECEXP (x
, i
, j
), insn
);
2362 /* LOC is a location with INSN that is an operand address (the contents of
2363 a MEM). Find the best equivalent address to use that is valid for this
2366 On most CISC machines, complicated address modes are costly, and rtx_cost
2367 is a good approximation for that cost. However, most RISC machines have
2368 only a few (usually only one) memory reference formats. If an address is
2369 valid at all, it is often just as cheap as any other address. Hence, for
2370 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2371 costs of various addresses. For two addresses of equal cost, choose the one
2372 with the highest `rtx_cost' value as that has the potential of eliminating
2373 the most insns. For equal costs, we choose the first in the equivalence
2374 class. Note that we ignore the fact that pseudo registers are cheaper
2375 than hard registers here because we would also prefer the pseudo registers.
2379 find_best_addr (insn
, loc
)
2383 struct table_elt
*elt
, *p
;
2386 int found_better
= 1;
2387 int save_do_not_record
= do_not_record
;
2388 int save_hash_arg_in_memory
= hash_arg_in_memory
;
2389 int save_hash_arg_in_struct
= hash_arg_in_struct
;
2394 /* Do not try to replace constant addresses or addresses of local and
2395 argument slots. These MEM expressions are made only once and inserted
2396 in many instructions, as well as being used to control symbol table
2397 output. It is not safe to clobber them.
2399 There are some uncommon cases where the address is already in a register
2400 for some reason, but we cannot take advantage of that because we have
2401 no easy way to unshare the MEM. In addition, looking up all stack
2402 addresses is costly. */
2403 if ((GET_CODE (addr
) == PLUS
2404 && GET_CODE (XEXP (addr
, 0)) == REG
2405 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
2406 && (regno
= REGNO (XEXP (addr
, 0)),
2407 regno
== FRAME_POINTER_REGNUM
|| regno
== ARG_POINTER_REGNUM
))
2408 || (GET_CODE (addr
) == REG
2409 && (regno
= REGNO (addr
),
2410 regno
== FRAME_POINTER_REGNUM
|| regno
== ARG_POINTER_REGNUM
))
2411 || CONSTANT_ADDRESS_P (addr
))
2414 /* If this address is not simply a register, try to fold it. This will
2415 sometimes simplify the expression. Many simplifications
2416 will not be valid, but some, usually applying the associative rule, will
2417 be valid and produce better code. */
2418 if (GET_CODE (addr
) != REG
2419 && validate_change (insn
, loc
, fold_rtx (addr
, insn
), 0))
2422 /* If this address is not in the hash table, we can't do any better.
2423 Also, ignore if volatile. */
2425 hash_code
= HASH (addr
, Pmode
);
2426 addr_volatile
= do_not_record
;
2427 do_not_record
= save_do_not_record
;
2428 hash_arg_in_memory
= save_hash_arg_in_memory
;
2429 hash_arg_in_struct
= save_hash_arg_in_struct
;
2434 elt
= lookup (addr
, hash_code
, Pmode
);
2439 #ifndef ADDRESS_COST
2440 our_cost
= elt
->cost
;
2442 /* Find the lowest cost below ours that works. */
2443 for (elt
= elt
->first_same_value
; elt
; elt
= elt
->next_same_value
)
2444 if (elt
->cost
< our_cost
2445 && (GET_CODE (elt
->exp
) == REG
|| exp_equiv_p (elt
->exp
, elt
->exp
, 1, 0))
2446 && validate_change (insn
, loc
, canon_reg (copy_rtx (elt
->exp
), 0), 0))
2451 /* We need to find the best (under the criteria documented above) entry in
2452 the class that is valid. We use the `flag' field to indicate choices
2453 that were invalid and iterate until we can't find a better one that
2454 hasn't already been tried. */
2456 for (p
= elt
->first_same_value
; p
; p
= p
->next_same_value
)
2459 while (found_better
)
2461 int best_addr_cost
= ADDRESS_COST (*loc
);
2462 int best_rtx_cost
= (elt
->cost
+ 1) >> 1;
2463 struct table_elt
*best_elt
= elt
;
2466 for (p
= elt
->first_same_value
; p
; p
= p
->next_same_value
)
2468 && (GET_CODE (p
->exp
) == REG
|| exp_equiv_p (p
->exp
, p
->exp
, 1, 0))
2469 && (ADDRESS_COST (p
->exp
) < best_addr_cost
2470 || (ADDRESS_COST (p
->exp
) == best_addr_cost
2471 && (p
->cost
+ 1) >> 1 > best_rtx_cost
)))
2474 best_addr_cost
= ADDRESS_COST (p
->exp
);
2475 best_rtx_cost
= (p
->cost
+ 1) >> 1;
2481 if (validate_change (insn
, loc
,
2482 canon_reg (copy_rtx (best_elt
->exp
), 0), 0))
2491 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2492 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2493 what values are being compared.
2495 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2496 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2497 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2498 compared to produce cc0.
2500 The return value is the comparison operator and is either the code of
2501 A or the code corresponding to the inverse of the comparison. */
2503 static enum rtx_code
2504 find_comparison_args (code
, parg1
, parg2
)
2510 arg1
= *parg1
, arg2
= *parg2
;
2512 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2514 while (arg2
== const0_rtx
)
2516 /* Set non-zero when we find something of interest. */
2518 int reverse_code
= 0;
2519 struct table_elt
*p
= 0;
2521 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2522 On machines with CC0, this is the only case that can occur, since
2523 fold_rtx will return the COMPARE or item being compared with zero
2526 if (GET_CODE (arg1
) == COMPARE
&& arg2
== const0_rtx
)
2529 /* If ARG1 is a comparison operator and CODE is testing for
2530 STORE_FLAG_VALUE, get the inner arguments. */
2532 else if (GET_RTX_CLASS (GET_CODE (arg1
)) == '<')
2534 if (code
== NE
|| (code
== LT
&& STORE_FLAG_VALUE
== -1))
2536 else if (code
== EQ
|| (code
== GE
&& STORE_FLAG_VALUE
== -1))
2537 x
= arg1
, reverse_code
= 1;
2540 /* ??? We could also check for
2542 (ne (and (eq (...) (const_int 1))) (const_int 0))
2544 and related forms, but let's wait until we see them occurring. */
2547 /* Look up ARG1 in the hash table and see if it has an equivalence
2548 that lets us see what is being compared. */
2549 p
= lookup (arg1
, safe_hash (arg1
, GET_MODE (arg1
)) % NBUCKETS
,
2551 if (p
) p
= p
->first_same_value
;
2553 for (; p
; p
= p
->next_same_value
)
2555 enum machine_mode inner_mode
= GET_MODE (p
->exp
);
2557 /* If the entry isn't valid, skip it. */
2558 if (! exp_equiv_p (p
->exp
, p
->exp
, 1, 0))
2561 if (GET_CODE (p
->exp
) == COMPARE
2562 /* Another possibility is that this machine has a compare insn
2563 that includes the comparison code. In that case, ARG1 would
2564 be equivalent to a comparison operation that would set ARG1 to
2565 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2566 ORIG_CODE is the actual comparison being done; if it is an EQ,
2567 we must reverse ORIG_CODE. On machine with a negative value
2568 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2571 && inner_mode
!= VOIDmode
2572 && GET_MODE_BITSIZE (inner_mode
) <= HOST_BITS_PER_INT
2573 && (STORE_FLAG_VALUE
2574 & (1 << (GET_MODE_BITSIZE (inner_mode
) - 1)))))
2575 && GET_RTX_CLASS (GET_CODE (p
->exp
)) == '<'))
2580 else if ((code
== EQ
2582 && inner_mode
!= VOIDmode
2583 && GET_MODE_BITSIZE (inner_mode
) <= HOST_BITS_PER_INT
2584 && (STORE_FLAG_VALUE
2585 & (1 << (GET_MODE_BITSIZE (inner_mode
) - 1)))))
2586 && GET_RTX_CLASS (GET_CODE (p
->exp
)) == '<')
2593 /* If this is fp + constant, the equivalent is a better operand since
2594 it may let us predict the value of the comparison. */
2595 else if (NONZERO_BASE_PLUS_P (p
->exp
))
2602 /* If we didn't find a useful equivalence for ARG1, we are done.
2603 Otherwise, set up for the next iteration. */
2607 arg1
= XEXP (x
, 0), arg2
= XEXP (x
, 1);
2608 if (GET_RTX_CLASS (GET_CODE (x
)) == '<')
2609 code
= GET_CODE (x
);
2612 code
= reverse_condition (code
);
2615 /* Return our results. */
2616 *parg1
= fold_rtx (arg1
, 0), *parg2
= fold_rtx (arg2
, 0);
2621 /* Try to simplify a unary operation CODE whose output mode is to be
2622 MODE with input operand OP whose mode was originally OP_MODE.
2623 Return zero if no simplification can be made. */
2626 simplify_unary_operation (code
, mode
, op
, op_mode
)
2628 enum machine_mode mode
;
2630 enum machine_mode op_mode
;
2632 register int width
= GET_MODE_BITSIZE (mode
);
2634 /* The order of these tests is critical so that, for example, we don't
2635 check the wrong mode (input vs. output) for a conversion operation,
2636 such as FIX. At some point, this should be simplified. */
2638 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2639 if (code
== FLOAT
&& GET_CODE (op
) == CONST_INT
)
2643 #ifdef REAL_ARITHMETIC
2644 REAL_VALUE_FROM_INT (d
, INTVAL (op
), INTVAL (op
) < 0 ? ~0 : 0);
2646 d
= (double) INTVAL (op
);
2648 return CONST_DOUBLE_FROM_REAL_VALUE (d
, mode
);
2650 else if (code
== UNSIGNED_FLOAT
&& GET_CODE (op
) == CONST_INT
)
2654 #ifdef REAL_ARITHMETIC
2655 REAL_VALUE_FROM_INT (d
, INTVAL (op
), 0);
2657 d
= (double) (unsigned int) INTVAL (op
);
2659 return CONST_DOUBLE_FROM_REAL_VALUE (d
, mode
);
2662 else if (code
== FLOAT
&& GET_CODE (op
) == CONST_DOUBLE
2663 && GET_MODE (op
) == VOIDmode
)
2667 #ifdef REAL_ARITHMETIC
2668 REAL_VALUE_FROM_INT (d
, CONST_DOUBLE_LOW (op
), CONST_DOUBLE_HIGH (op
));
2670 if (CONST_DOUBLE_HIGH (op
) < 0)
2672 d
= (double) (~ CONST_DOUBLE_HIGH (op
));
2673 d
*= ((double) (1 << (HOST_BITS_PER_INT
/ 2))
2674 * (double) (1 << (HOST_BITS_PER_INT
/ 2)));
2675 d
+= (double) (unsigned) (~ CONST_DOUBLE_LOW (op
));
2680 d
= (double) CONST_DOUBLE_HIGH (op
);
2681 d
*= ((double) (1 << (HOST_BITS_PER_INT
/ 2))
2682 * (double) (1 << (HOST_BITS_PER_INT
/ 2)));
2683 d
+= (double) (unsigned) CONST_DOUBLE_LOW (op
);
2685 #endif /* REAL_ARITHMETIC */
2686 return CONST_DOUBLE_FROM_REAL_VALUE (d
, mode
);
2688 else if (code
== UNSIGNED_FLOAT
&& GET_CODE (op
) == CONST_DOUBLE
2689 && GET_MODE (op
) == VOIDmode
)
2693 #ifdef REAL_ARITHMETIC
2694 REAL_VALUE_FROM_UNSIGNED_INT (d
, CONST_DOUBLE_LOW (op
),
2695 CONST_DOUBLE_HIGH (op
));
2697 d
= (double) CONST_DOUBLE_HIGH (op
);
2698 d
*= ((double) (1 << (HOST_BITS_PER_INT
/ 2))
2699 * (double) (1 << (HOST_BITS_PER_INT
/ 2)));
2700 d
+= (double) (unsigned) CONST_DOUBLE_LOW (op
);
2701 #endif /* REAL_ARITHMETIC */
2702 return CONST_DOUBLE_FROM_REAL_VALUE (d
, mode
);
2706 else if (GET_CODE (op
) == CONST_INT
2707 && width
<= HOST_BITS_PER_INT
&& width
> 0)
2709 register int arg0
= INTVAL (op
);
2723 val
= (arg0
>= 0 ? arg0
: - arg0
);
2727 /* Don't use ffs here. Instead, get low order bit and then its
2728 number. If arg0 is zero, this will return 0, as desired. */
2729 arg0
&= GET_MODE_MASK (mode
);
2730 val
= exact_log2 (arg0
& (- arg0
)) + 1;
2738 if (op_mode
== VOIDmode
)
2740 if (GET_MODE_BITSIZE (op_mode
) == HOST_BITS_PER_INT
)
2742 else if (GET_MODE_BITSIZE (op_mode
) < HOST_BITS_PER_INT
)
2743 val
= arg0
& ~((-1) << GET_MODE_BITSIZE (op_mode
));
2749 if (op_mode
== VOIDmode
)
2751 if (GET_MODE_BITSIZE (op_mode
) == HOST_BITS_PER_INT
)
2753 else if (GET_MODE_BITSIZE (op_mode
) < HOST_BITS_PER_INT
)
2755 val
= arg0
& ~((-1) << GET_MODE_BITSIZE (op_mode
));
2756 if (val
& (1 << (GET_MODE_BITSIZE (op_mode
) - 1)))
2757 val
-= 1 << GET_MODE_BITSIZE (op_mode
);
2770 /* Clear the bits that don't belong in our mode,
2771 unless they and our sign bit are all one.
2772 So we get either a reasonable negative value or a reasonable
2773 unsigned value for this mode. */
2774 if (width
< HOST_BITS_PER_INT
2775 && ((val
& ((-1) << (width
- 1))) != ((-1) << (width
- 1))))
2776 val
&= (1 << width
) - 1;
2778 return gen_rtx (CONST_INT
, VOIDmode
, val
);
2781 /* We can do some operations on integer CONST_DOUBLEs. Also allow
2782 for a DImode operation on a CONST_INT. */
2783 else if (GET_MODE (op
) == VOIDmode
2784 && (GET_CODE (op
) == CONST_DOUBLE
|| GET_CODE (op
) == CONST_INT
))
2788 if (GET_CODE (op
) == CONST_DOUBLE
)
2789 l1
= CONST_DOUBLE_LOW (op
), h1
= CONST_DOUBLE_HIGH (op
);
2791 l1
= INTVAL (op
), h1
= l1
< 0 ? -1 : 0;
2801 neg_double (l1
, h1
, &lv
, &hv
);
2806 neg_double (l1
, h1
, &lv
, &hv
);
2814 lv
= HOST_BITS_PER_INT
+ exact_log2 (h1
& (-h1
)) + 1;
2816 lv
= exact_log2 (l1
& (-l1
)) + 1;
2820 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
)
2821 return gen_rtx (CONST_INT
, VOIDmode
, l1
& GET_MODE_MASK (mode
));
2833 return immed_double_const (lv
, hv
, mode
);
2836 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2837 else if (GET_CODE (op
) == CONST_DOUBLE
2838 && GET_MODE_CLASS (mode
) == MODE_FLOAT
)
2844 if (setjmp (handler
))
2845 /* There used to be a warning here, but that is inadvisable.
2846 People may want to cause traps, and the natural way
2847 to do it should not get a warning. */
2850 set_float_handler (handler
);
2852 REAL_VALUE_FROM_CONST_DOUBLE (d
, op
);
2857 d
= REAL_VALUE_NEGATE (d
);
2861 if (REAL_VALUE_NEGATIVE (d
))
2862 d
= REAL_VALUE_NEGATE (d
);
2865 case FLOAT_TRUNCATE
:
2866 d
= (double) REAL_VALUE_TRUNCATE (mode
, d
);
2870 /* All this does is change the mode. */
2874 d
= (double) REAL_VALUE_FIX_TRUNCATE (d
);
2878 d
= (double) REAL_VALUE_UNSIGNED_FIX_TRUNCATE (d
);
2888 x
= immed_real_const_1 (d
, mode
);
2889 set_float_handler (0);
2892 else if (GET_CODE (op
) == CONST_DOUBLE
&& GET_MODE_CLASS (mode
) == MODE_INT
2893 && width
<= HOST_BITS_PER_INT
&& width
> 0)
2900 if (setjmp (handler
))
2903 set_float_handler (handler
);
2905 REAL_VALUE_FROM_CONST_DOUBLE (d
, op
);
2910 val
= REAL_VALUE_FIX (d
);
2914 val
= REAL_VALUE_UNSIGNED_FIX (d
);
2921 set_float_handler (0);
2923 /* Clear the bits that don't belong in our mode,
2924 unless they and our sign bit are all one.
2925 So we get either a reasonable negative value or a reasonable
2926 unsigned value for this mode. */
2927 if (width
< HOST_BITS_PER_INT
2928 && ((val
& ((-1) << (width
- 1))) != ((-1) << (width
- 1))))
2929 val
&= (1 << width
) - 1;
2931 return gen_rtx (CONST_INT
, VOIDmode
, val
);
2934 else if (GET_MODE_CLASS (mode
) == MODE_INT
2935 || TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
2937 /* There are some simplifications we can do even if the operands
2938 aren't constant, but they don't apply to floating-point
2944 /* (not (not X)) == X, similarly for NEG. */
2945 if (GET_CODE (op
) == code
)
2946 return XEXP (op
, 0);
2950 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
2951 becomes just the MINUS if its mode is MODE. This allows
2952 folding switch statements on machines using casesi (such as
2954 if (GET_CODE (op
) == TRUNCATE
2955 && GET_MODE (XEXP (op
, 0)) == mode
2956 && GET_CODE (XEXP (op
, 0)) == MINUS
2957 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == LABEL_REF
2958 && GET_CODE (XEXP (XEXP (op
, 0), 1)) == LABEL_REF
)
2959 return XEXP (op
, 0);
2969 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
2970 and OP1. Return 0 if no simplification is possible.
2972 Don't use this for relational operations such as EQ or LT.
2973 Use simplify_relational_operation instead. */
2976 simplify_binary_operation (code
, mode
, op0
, op1
)
2978 enum machine_mode mode
;
2981 register int arg0
, arg1
, arg0s
, arg1s
;
2983 int width
= GET_MODE_BITSIZE (mode
);
2985 /* Relational operations don't work here. We must know the mode
2986 of the operands in order to do the comparison correctly.
2987 Assuming a full word can give incorrect results.
2988 Consider comparing 128 with -128 in QImode. */
2990 if (GET_RTX_CLASS (code
) == '<')
2993 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2994 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2995 && GET_CODE (op0
) == CONST_DOUBLE
&& GET_CODE (op1
) == CONST_DOUBLE
2996 && mode
== GET_MODE (op0
) && mode
== GET_MODE (op1
))
2998 REAL_VALUE_TYPE f0
, f1
, value
;
3001 if (setjmp (handler
))
3004 set_float_handler (handler
);
3006 REAL_VALUE_FROM_CONST_DOUBLE (f0
, op0
);
3007 REAL_VALUE_FROM_CONST_DOUBLE (f1
, op1
);
3008 f0
= REAL_VALUE_TRUNCATE (mode
, f0
);
3009 f1
= REAL_VALUE_TRUNCATE (mode
, f1
);
3011 #ifdef REAL_ARITHMETIC
3012 REAL_ARITHMETIC (value
, code
, f0
, f1
);
3026 #ifndef REAL_INFINITY
3033 value
= MIN (f0
, f1
);
3036 value
= MAX (f0
, f1
);
3043 set_float_handler (0);
3044 value
= REAL_VALUE_TRUNCATE (mode
, value
);
3045 return immed_real_const_1 (value
, mode
);
3048 /* We can fold some multi-word operations. */
3049 else if (GET_MODE_CLASS (mode
) == MODE_INT
3050 && GET_CODE (op0
) == CONST_DOUBLE
3051 && (GET_CODE (op1
) == CONST_DOUBLE
|| GET_CODE (op1
) == CONST_INT
))
3053 int l1
, l2
, h1
, h2
, lv
, hv
;
3055 l1
= CONST_DOUBLE_LOW (op0
), h1
= CONST_DOUBLE_HIGH (op0
);
3057 if (GET_CODE (op1
) == CONST_DOUBLE
)
3058 l2
= CONST_DOUBLE_LOW (op1
), h2
= CONST_DOUBLE_HIGH (op1
);
3060 l2
= INTVAL (op1
), h2
= l2
< 0 ? -1 : 0;
3065 /* A - B == A + (-B). */
3066 neg_double (l2
, h2
, &lv
, &hv
);
3069 /* .. fall through ... */
3072 add_double (l1
, h1
, l2
, h2
, &lv
, &hv
);
3076 mul_double (l1
, h1
, l2
, h2
, &lv
, &hv
);
3079 case DIV
: case MOD
: case UDIV
: case UMOD
:
3080 /* We'd need to include tree.h to do this and it doesn't seem worth
3085 lv
= l1
& l2
, hv
= h1
& h2
;
3089 lv
= l1
| l2
, hv
= h1
| h2
;
3093 lv
= l1
^ l2
, hv
= h1
^ h2
;
3097 if (h1
< h2
|| (h1
== h2
&& (unsigned) l1
< (unsigned) l2
))
3104 if (h1
> h2
|| (h1
== h2
&& (unsigned) l1
> (unsigned) l2
))
3111 if ((unsigned) h1
< (unsigned) h2
3112 || (h1
== h2
&& (unsigned) l1
< (unsigned) l2
))
3119 if ((unsigned) h1
> (unsigned) h2
3120 || (h1
== h2
&& (unsigned) l1
> (unsigned) l2
))
3126 case LSHIFTRT
: case ASHIFTRT
:
3127 case ASHIFT
: case LSHIFT
:
3128 case ROTATE
: case ROTATERT
:
3129 #ifdef SHIFT_COUNT_TRUNCATED
3130 l2
&= (GET_MODE_BITSIZE (mode
) - 1), h2
= 0;
3133 if (h2
!= 0 || l2
< 0 || l2
>= GET_MODE_BITSIZE (mode
))
3136 if (code
== LSHIFTRT
|| code
== ASHIFTRT
)
3137 rshift_double (l1
, h1
, l2
, GET_MODE_BITSIZE (mode
), &lv
, &hv
,
3139 else if (code
== ASHIFT
|| code
== LSHIFT
)
3140 lshift_double (l1
, h1
, l2
, GET_MODE_BITSIZE (mode
), &lv
, &hv
,
3142 else if (code
== ROTATE
)
3143 lrotate_double (l1
, h1
, l2
, GET_MODE_BITSIZE (mode
), &lv
, &hv
);
3144 else /* code == ROTATERT */
3145 rrotate_double (l1
, h1
, l2
, GET_MODE_BITSIZE (mode
), &lv
, &hv
);
3152 return immed_double_const (lv
, hv
, mode
);
3154 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3156 if (GET_CODE (op0
) != CONST_INT
|| GET_CODE (op1
) != CONST_INT
3157 || width
> HOST_BITS_PER_INT
|| width
== 0)
3159 /* Even if we can't compute a constant result,
3160 there are some cases worth simplifying. */
3165 /* In IEEE floating point, x+0 is not the same as x. Similarly
3166 for the other optimizations below. */
3167 if (TARGET_FLOAT_FORMAT
== IEEE_FLOAT_FORMAT
3168 && GET_MODE_CLASS (mode
) != MODE_INT
)
3171 if (op1
== CONST0_RTX (mode
))
3174 /* Strip off any surrounding CONSTs. They don't matter in any of
3176 if (GET_CODE (op0
) == CONST
)
3177 op0
= XEXP (op0
, 0);
3178 if (GET_CODE (op1
) == CONST
)
3179 op1
= XEXP (op1
, 0);
3181 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3182 if (GET_CODE (op0
) == NEG
)
3184 rtx tem
= simplify_binary_operation (MINUS
, mode
,
3185 op1
, XEXP (op0
, 0));
3186 return tem
? tem
: gen_rtx (MINUS
, mode
, op1
, XEXP (op0
, 0));
3188 else if (GET_CODE (op1
) == NEG
)
3190 rtx tem
= simplify_binary_operation (MINUS
, mode
,
3191 op0
, XEXP (op1
, 0));
3192 return tem
? tem
: gen_rtx (MINUS
, mode
, op0
, XEXP (op1
, 0));
3195 /* Don't use the associative law for floating point.
3196 The inaccuracy makes it nonassociative,
3197 and subtle programs can break if operations are associated. */
3198 if (GET_MODE_CLASS (mode
) != MODE_INT
)
3201 /* (a - b) + b -> a, similarly a + (b - a) -> a */
3202 if (GET_CODE (op0
) == MINUS
3203 && rtx_equal_p (XEXP (op0
, 1), op1
) && ! side_effects_p (op1
))
3204 return XEXP (op0
, 0);
3206 if (GET_CODE (op1
) == MINUS
3207 && rtx_equal_p (XEXP (op1
, 1), op0
) && ! side_effects_p (op0
))
3208 return XEXP (op1
, 0);
3210 /* (c1 - a) + c2 becomes (c1 + c2) - a. */
3211 if (GET_CODE (op1
) == CONST_INT
&& GET_CODE (op0
) == MINUS
3212 && GET_CODE (XEXP (op0
, 0)) == CONST_INT
)
3214 rtx tem
= simplify_binary_operation (PLUS
, mode
, op1
,
3217 return tem
? gen_rtx (MINUS
, mode
, tem
, XEXP (op0
, 1)) : 0;
3220 /* Handle both-operands-constant cases. */
3221 if (CONSTANT_P (op0
) && CONSTANT_P (op1
)
3222 && GET_CODE (op0
) != CONST_DOUBLE
3223 && GET_CODE (op1
) != CONST_DOUBLE
3224 && GET_MODE_CLASS (mode
) == MODE_INT
)
3226 if (GET_CODE (op1
) == CONST_INT
)
3227 return plus_constant (op0
, INTVAL (op1
));
3228 else if (GET_CODE (op0
) == CONST_INT
)
3229 return plus_constant (op1
, INTVAL (op0
));
3231 return gen_rtx (CONST
, mode
,
3232 gen_rtx (PLUS
, mode
,
3233 GET_CODE (op0
) == CONST
3234 ? XEXP (op0
, 0) : op0
,
3235 GET_CODE (op1
) == CONST
3236 ? XEXP (op1
, 0) : op1
));
3238 else if (GET_CODE (op1
) == CONST_INT
3239 && GET_CODE (op0
) == PLUS
3240 && (CONSTANT_P (XEXP (op0
, 0))
3241 || CONSTANT_P (XEXP (op0
, 1))))
3242 /* constant + (variable + constant)
3243 can result if an index register is made constant.
3244 We simplify this by adding the constants.
3245 If we did not, it would become an invalid address. */
3246 return plus_constant (op0
, INTVAL (op1
));
3251 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3252 using cc0, in which case we want to leave it as a COMPARE
3253 so we can distinguish it from a register-register-copy.
3255 In IEEE floating point, x-0 is not the same as x. */
3257 if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3258 || GET_MODE_CLASS (mode
) == MODE_INT
)
3259 && op1
== CONST0_RTX (mode
))
3262 /* Do nothing here. */
3267 /* In IEEE floating point, x-0 is not the same as x. */
3268 if (rtx_equal_p (op0
, op1
)
3269 && ! side_effects_p (op0
)
3270 /* We can't assume x-x is 0
3271 even with non-IEEE floating point. */
3272 && GET_MODE_CLASS (mode
) != MODE_FLOAT
)
3275 /* Change subtraction from zero into negation. */
3276 if (op0
== CONST0_RTX (mode
))
3277 return gen_rtx (NEG
, mode
, op1
);
3279 /* The remainer of these cases cannot be done for IEEE
3281 if (TARGET_FLOAT_FORMAT
== IEEE_FLOAT_FORMAT
3282 && GET_MODE_CLASS (mode
) != MODE_INT
)
3285 /* Subtracting 0 has no effect. */
3286 if (op1
== CONST0_RTX (mode
))
3289 /* Strip off any surrounding CONSTs. They don't matter in any of
3291 if (GET_CODE (op0
) == CONST
)
3292 op0
= XEXP (op0
, 0);
3293 if (GET_CODE (op1
) == CONST
)
3294 op1
= XEXP (op1
, 0);
3296 /* (a - (-b)) -> (a + b). */
3297 if (GET_CODE (op1
) == NEG
)
3299 rtx tem
= simplify_binary_operation (PLUS
, mode
,
3300 op0
, XEXP (op1
, 0));
3301 return tem
? tem
: gen_rtx (PLUS
, mode
, op0
, XEXP (op1
, 0));
3304 /* Don't use the associative law for floating point.
3305 The inaccuracy makes it nonassociative,
3306 and subtle programs can break if operations are associated. */
3307 if (GET_MODE_CLASS (mode
) != MODE_INT
)
3310 /* (a + b) - a -> b, and (b - (a + b)) -> -a */
3311 if (GET_CODE (op0
) == PLUS
3312 && rtx_equal_p (XEXP (op0
, 0), op1
)
3313 && ! side_effects_p (op1
))
3314 return XEXP (op0
, 1);
3315 else if (GET_CODE (op0
) == PLUS
3316 && rtx_equal_p (XEXP (op0
, 1), op1
)
3317 && ! side_effects_p (op1
))
3318 return XEXP (op0
, 0);
3320 if (GET_CODE (op1
) == PLUS
3321 && rtx_equal_p (XEXP (op1
, 0), op0
)
3322 && ! side_effects_p (op0
))
3324 rtx tem
= simplify_unary_operation (NEG
, mode
, XEXP (op1
, 1),
3327 return tem
? tem
: gen_rtx (NEG
, mode
, XEXP (op1
, 1));
3329 else if (GET_CODE (op1
) == PLUS
3330 && rtx_equal_p (XEXP (op1
, 1), op0
)
3331 && ! side_effects_p (op0
))
3333 rtx tem
= simplify_unary_operation (NEG
, mode
, XEXP (op1
, 0),
3336 return tem
? tem
: gen_rtx (NEG
, mode
, XEXP (op1
, 0));
3339 /* a - (a - b) -> b */
3340 if (GET_CODE (op1
) == MINUS
&& rtx_equal_p (op0
, XEXP (op1
, 0))
3341 && ! side_effects_p (op0
))
3342 return XEXP (op1
, 1);
3344 /* (a +/- b) - (a +/- c) can be simplified. Do variants of
3345 this involving commutativity. The most common case is
3346 (a + C1) - (a + C2), but it's not hard to do all the cases. */
3347 if ((GET_CODE (op0
) == PLUS
|| GET_CODE (op0
) == MINUS
)
3348 && (GET_CODE (op1
) == PLUS
|| GET_CODE (op1
) == MINUS
))
3350 rtx lhs0
= XEXP (op0
, 0), lhs1
= XEXP (op0
, 1);
3351 rtx rhs0
= XEXP (op1
, 0), rhs1
= XEXP (op1
, 1);
3352 int lhs_neg
= GET_CODE (op0
) == MINUS
;
3353 int rhs_neg
= GET_CODE (op1
) == MINUS
;
3354 rtx lhs
= 0, rhs
= 0;
3356 /* Set LHS and RHS to the two different terms. */
3357 if (rtx_equal_p (lhs0
, rhs0
) && ! side_effects_p (lhs0
))
3358 lhs
= lhs1
, rhs
= rhs1
;
3359 else if (! rhs_neg
&& rtx_equal_p (lhs0
, rhs1
)
3360 && ! side_effects_p (lhs0
))
3361 lhs
= lhs1
, rhs
= rhs0
;
3362 else if (! lhs_neg
&& rtx_equal_p (lhs1
, rhs0
)
3363 && ! side_effects_p (lhs1
))
3364 lhs
= lhs0
, rhs
= rhs1
;
3365 else if (! lhs_neg
&& ! rhs_neg
&& rtx_equal_p (lhs1
, rhs1
)
3366 && ! side_effects_p (lhs1
))
3367 lhs
= lhs0
, rhs
= rhs0
;
3369 /* The RHS is the operand of a MINUS, so its negation
3370 status should be complemented. */
3371 rhs_neg
= ! rhs_neg
;
3373 /* If we found two values equal, form the sum or difference
3374 of the remaining two terms. */
3377 rtx tem
= simplify_binary_operation (lhs_neg
== rhs_neg
3380 lhs_neg
? rhs
: lhs
,
3381 lhs_neg
? lhs
: rhs
);
3383 tem
= gen_rtx (lhs_neg
== rhs_neg
3385 mode
, lhs_neg
? rhs
: lhs
,
3386 lhs_neg
? lhs
: rhs
);
3388 /* If both sides negated, negate result. */
3389 if (lhs_neg
&& rhs_neg
)
3392 = simplify_unary_operation (NEG
, mode
, tem
, mode
);
3394 tem1
= gen_rtx (NEG
, mode
, tem
);
3404 /* c1 - (a + c2) becomes (c1 - c2) - a. */
3405 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == PLUS
3406 && GET_CODE (XEXP (op1
, 1)) == CONST_INT
)
3408 rtx tem
= simplify_binary_operation (MINUS
, mode
, op0
,
3411 return tem
? gen_rtx (MINUS
, mode
, tem
, XEXP (op1
, 0)) : 0;
3414 /* c1 - (c2 - a) becomes (c1 - c2) + a. */
3415 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == MINUS
3416 && GET_CODE (XEXP (op1
, 0)) == CONST_INT
)
3418 rtx tem
= simplify_binary_operation (MINUS
, mode
, op0
,
3421 return (tem
&& GET_CODE (tem
) == CONST_INT
3422 ? plus_constant (XEXP (op1
, 1), INTVAL (tem
))
3426 /* Don't let a relocatable value get a negative coeff. */
3427 if (GET_CODE (op1
) == CONST_INT
)
3428 return plus_constant (op0
, - INTVAL (op1
));
3432 if (op1
== constm1_rtx
)
3434 rtx tem
= simplify_unary_operation (NEG
, mode
, op0
, mode
);
3436 return tem
? tem
: gen_rtx (NEG
, mode
, op0
);
3439 /* In IEEE floating point, x*0 is not always 0. */
3440 if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3441 || GET_MODE_CLASS (mode
) == MODE_INT
)
3442 && op1
== CONST0_RTX (mode
)
3443 && ! side_effects_p (op0
))
3446 /* In IEEE floating point, x*1 is not equivalent to x for nans.
3447 However, ANSI says we can drop signals,
3448 so we can do this anyway. */
3449 if (op1
== CONST1_RTX (mode
))
3452 /* Convert multiply by constant power of two into shift. */
3453 if (GET_CODE (op1
) == CONST_INT
3454 && (val
= exact_log2 (INTVAL (op1
))) >= 0)
3455 return gen_rtx (ASHIFT
, mode
, op0
,
3456 gen_rtx (CONST_INT
, VOIDmode
, val
));
3458 if (GET_CODE (op1
) == CONST_DOUBLE
3459 && GET_MODE_CLASS (GET_MODE (op1
)) == MODE_FLOAT
)
3462 REAL_VALUE_FROM_CONST_DOUBLE (d
, op1
);
3464 /* x*2 is x+x and x*(-1) is -x */
3465 if (REAL_VALUES_EQUAL (d
, dconst2
)
3466 && GET_MODE (op0
) == mode
)
3467 return gen_rtx (PLUS
, mode
, op0
, copy_rtx (op0
));
3469 else if (REAL_VALUES_EQUAL (d
, dconstm1
)
3470 && GET_MODE (op0
) == mode
)
3471 return gen_rtx (NEG
, mode
, op0
);
3476 if (op1
== const0_rtx
)
3478 if (GET_CODE (op1
) == CONST_INT
3479 && (INTVAL (op1
) & GET_MODE_MASK (mode
)) == GET_MODE_MASK (mode
))
3481 if (rtx_equal_p (op0
, op1
) && ! side_effects_p (op0
))
3483 /* A | (~A) -> -1 */
3484 if (((GET_CODE (op0
) == NOT
&& rtx_equal_p (XEXP (op0
, 0), op1
))
3485 || (GET_CODE (op1
) == NOT
&& rtx_equal_p (XEXP (op1
, 0), op0
)))
3486 && ! side_effects_p (op0
))
3491 if (op1
== const0_rtx
)
3493 if (GET_CODE (op1
) == CONST_INT
3494 && (INTVAL (op1
) & GET_MODE_MASK (mode
)) == GET_MODE_MASK (mode
))
3495 return gen_rtx (NOT
, mode
, op0
);
3496 if (op0
== op1
&& ! side_effects_p (op0
))
3501 if (op1
== const0_rtx
&& ! side_effects_p (op0
))
3503 if (GET_CODE (op1
) == CONST_INT
3504 && (INTVAL (op1
) & GET_MODE_MASK (mode
)) == GET_MODE_MASK (mode
))
3506 if (op0
== op1
&& ! side_effects_p (op0
))
3509 if (((GET_CODE (op0
) == NOT
&& rtx_equal_p (XEXP (op0
, 0), op1
))
3510 || (GET_CODE (op1
) == NOT
&& rtx_equal_p (XEXP (op1
, 0), op0
)))
3511 && ! side_effects_p (op0
))
3516 /* Convert divide by power of two into shift (divide by 1 handled
3518 if (GET_CODE (op1
) == CONST_INT
3519 && (arg1
= exact_log2 (INTVAL (op1
))) > 0)
3520 return gen_rtx (LSHIFTRT
, mode
, op0
,
3521 gen_rtx (CONST_INT
, VOIDmode
, arg1
));
3523 /* ... fall through ... */
3526 if (op1
== CONST1_RTX (mode
))
3528 else if (op0
== CONST0_RTX (mode
)
3529 && ! side_effects_p (op1
))
3531 #if 0 /* Turned off till an expert says this is a safe thing to do. */
3532 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3533 /* Change division by a constant into multiplication. */
3534 else if (GET_CODE (op1
) == CONST_DOUBLE
3535 && GET_MODE_CLASS (GET_MODE (op1
)) == MODE_FLOAT
3536 && op1
!= CONST0_RTX (mode
))
3539 REAL_VALUE_FROM_CONST_DOUBLE (d
, op1
);
3540 if (REAL_VALUES_EQUAL (d
, dconst0
))
3542 #if defined (REAL_ARITHMETIC)
3543 REAL_ARITHMETIC (d
, RDIV_EXPR
, dconst1
, d
);
3544 return gen_rtx (MULT
, mode
, op0
,
3545 CONST_DOUBLE_FROM_REAL_VALUE (d
, mode
));
3547 return gen_rtx (MULT
, mode
, op0
,
3548 CONST_DOUBLE_FROM_REAL_VALUE (1./d
, mode
));
3556 /* Handle modulus by power of two (mod with 1 handled below). */
3557 if (GET_CODE (op1
) == CONST_INT
3558 && exact_log2 (INTVAL (op1
)) > 0)
3559 return gen_rtx (AND
, mode
, op0
,
3560 gen_rtx (CONST_INT
, VOIDmode
, INTVAL (op1
) - 1));
3562 /* ... fall through ... */
3565 if ((op0
== const0_rtx
|| op1
== const1_rtx
)
3566 && ! side_effects_p (op0
) && ! side_effects_p (op1
))
3572 /* Rotating ~0 always results in ~0. */
3573 if (GET_CODE (op0
) == CONST_INT
&& width
<= HOST_BITS_PER_INT
3574 && INTVAL (op0
) == GET_MODE_MASK (mode
)
3575 && ! side_effects_p (op1
))
3578 /* ... fall through ... */
3584 if (op1
== const0_rtx
)
3586 if (op0
== const0_rtx
&& ! side_effects_p (op1
))
3591 if (width
<= HOST_BITS_PER_INT
&& GET_CODE (op1
) == CONST_INT
3592 && INTVAL (op1
) == 1 << (width
-1)
3593 && ! side_effects_p (op0
))
3595 else if (rtx_equal_p (op0
, op1
) && ! side_effects_p (op0
))
3600 if (width
<= HOST_BITS_PER_INT
&& GET_CODE (op1
) == CONST_INT
3601 && INTVAL (op1
) == GET_MODE_MASK (mode
) >> 1
3602 && ! side_effects_p (op0
))
3604 else if (rtx_equal_p (op0
, op1
) && ! side_effects_p (op0
))
3609 if (op1
== const0_rtx
&& ! side_effects_p (op0
))
3611 else if (rtx_equal_p (op0
, op1
) && ! side_effects_p (op0
))
3616 if (op1
== constm1_rtx
&& ! side_effects_p (op0
))
3618 else if (rtx_equal_p (op0
, op1
) && ! side_effects_p (op0
))
3629 /* Get the integer argument values in two forms:
3630 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3632 arg0
= INTVAL (op0
);
3633 arg1
= INTVAL (op1
);
3635 if (width
< HOST_BITS_PER_INT
)
3637 arg0
&= (1 << width
) - 1;
3638 arg1
&= (1 << width
) - 1;
3641 if (arg0s
& (1 << (width
- 1)))
3642 arg0s
|= ((-1) << width
);
3645 if (arg1s
& (1 << (width
- 1)))
3646 arg1s
|= ((-1) << width
);
3654 /* Compute the value of the arithmetic. */
3659 val
= arg0s
+ arg1s
;
3663 val
= arg0s
- arg1s
;
3667 val
= arg0s
* arg1s
;
3673 val
= arg0s
/ arg1s
;
3679 val
= arg0s
% arg1s
;
3685 val
= (unsigned) arg0
/ arg1
;
3691 val
= (unsigned) arg0
% arg1
;
3707 /* If shift count is undefined, don't fold it; let the machine do
3708 what it wants. But truncate it if the machine will do that. */
3712 #ifdef SHIFT_COUNT_TRUNCATED
3713 arg1
&= (BITS_PER_WORD
- 1);
3719 val
= ((unsigned) arg0
) >> arg1
;
3727 #ifdef SHIFT_COUNT_TRUNCATED
3728 arg1
&= (BITS_PER_WORD
- 1);
3734 val
= ((unsigned) arg0
) << arg1
;
3741 #ifdef SHIFT_COUNT_TRUNCATED
3742 arg1
&= (BITS_PER_WORD
- 1);
3748 val
= arg0s
>> arg1
;
3756 val
= ((((unsigned) arg0
) << (width
- arg1
))
3757 | (((unsigned) arg0
) >> arg1
));
3765 val
= ((((unsigned) arg0
) << arg1
)
3766 | (((unsigned) arg0
) >> (width
- arg1
)));
3770 /* Do nothing here. */
3777 /* Clear the bits that don't belong in our mode, unless they and our sign
3778 bit are all one. So we get either a reasonable negative value or a
3779 reasonable unsigned value for this mode. */
3780 if (width
< HOST_BITS_PER_INT
3781 && ((val
& ((-1) << (width
- 1))) != ((-1) << (width
- 1))))
3782 val
&= (1 << width
) - 1;
3784 return gen_rtx (CONST_INT
, VOIDmode
, val
);
3787 /* Like simplify_binary_operation except used for relational operators.
3788 MODE is the mode of the operands, not that of the result. */
3791 simplify_relational_operation (code
, mode
, op0
, op1
)
3793 enum machine_mode mode
;
3796 register int arg0
, arg1
, arg0s
, arg1s
;
3798 int width
= GET_MODE_BITSIZE (mode
);
3800 /* If op0 is a compare, extract the comparison arguments from it. */
3801 if (GET_CODE (op0
) == COMPARE
&& op1
== const0_rtx
)
3802 op1
= XEXP (op0
, 1), op0
= XEXP (op0
, 0);
3804 if (GET_CODE (op0
) != CONST_INT
|| GET_CODE (op1
) != CONST_INT
3805 || width
> HOST_BITS_PER_INT
|| width
== 0)
3807 /* Even if we can't compute a constant result,
3808 there are some cases worth simplifying. */
3810 /* For non-IEEE floating-point, if the two operands are equal, we know
3812 if (rtx_equal_p (op0
, op1
)
3813 && (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3814 || GET_MODE_CLASS (GET_MODE (op0
)) != MODE_FLOAT
))
3815 return (code
== EQ
|| code
== GE
|| code
== LE
|| code
== LEU
3816 || code
== GEU
) ? const_true_rtx
: const0_rtx
;
3817 else if (GET_CODE (op0
) == CONST_DOUBLE
3818 && GET_CODE (op1
) == CONST_DOUBLE
3819 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
3821 REAL_VALUE_TYPE d0
, d1
;
3824 int op0lt
, op1lt
, equal
;
3826 if (setjmp (handler
))
3829 set_float_handler (handler
);
3830 REAL_VALUE_FROM_CONST_DOUBLE (d0
, op0
);
3831 REAL_VALUE_FROM_CONST_DOUBLE (d1
, op1
);
3832 equal
= REAL_VALUES_EQUAL (d0
, d1
);
3833 op0lt
= REAL_VALUES_LESS (d0
, d1
);
3834 op1lt
= REAL_VALUES_LESS (d1
, d0
);
3835 set_float_handler (0);
3840 return equal
? const_true_rtx
: const0_rtx
;
3842 return !equal
? const_true_rtx
: const0_rtx
;
3844 return equal
|| op0lt
? const_true_rtx
: const0_rtx
;
3846 return op0lt
? const_true_rtx
: const0_rtx
;
3848 return equal
|| op1lt
? const_true_rtx
: const0_rtx
;
3850 return op1lt
? const_true_rtx
: const0_rtx
;
3859 /* We can't make this assumption due to #pragma weak */
3860 if (CONSTANT_P (op0
) && op1
== const0_rtx
)
3863 if (NONZERO_BASE_PLUS_P (op0
) && op1
== const0_rtx
3864 /* On some machines, the ap reg can be 0 sometimes. */
3865 && op0
!= arg_pointer_rtx
)
3872 /* We can't make this assumption due to #pragma weak */
3873 if (CONSTANT_P (op0
) && op1
== const0_rtx
)
3874 return const_true_rtx
;
3876 if (NONZERO_BASE_PLUS_P (op0
) && op1
== const0_rtx
3877 /* On some machines, the ap reg can be 0 sometimes. */
3878 && op0
!= arg_pointer_rtx
)
3879 return const_true_rtx
;
3883 /* Unsigned values are never negative, but we must be sure we are
3884 actually comparing a value, not a CC operand. */
3885 if (op1
== const0_rtx
3886 && GET_MODE_CLASS (mode
) == MODE_INT
)
3887 return const_true_rtx
;
3891 if (op1
== const0_rtx
3892 && GET_MODE_CLASS (mode
) == MODE_INT
)
3897 /* Unsigned values are never greater than the largest
3899 if (GET_CODE (op1
) == CONST_INT
3900 && INTVAL (op1
) == GET_MODE_MASK (mode
)
3901 && GET_MODE_CLASS (mode
) == MODE_INT
)
3902 return const_true_rtx
;
3906 if (GET_CODE (op1
) == CONST_INT
3907 && INTVAL (op1
) == GET_MODE_MASK (mode
)
3908 && GET_MODE_CLASS (mode
) == MODE_INT
)
3916 /* Get the integer argument values in two forms:
3917 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
3919 arg0
= INTVAL (op0
);
3920 arg1
= INTVAL (op1
);
3922 if (width
< HOST_BITS_PER_INT
)
3924 arg0
&= (1 << width
) - 1;
3925 arg1
&= (1 << width
) - 1;
3928 if (arg0s
& (1 << (width
- 1)))
3929 arg0s
|= ((-1) << width
);
3932 if (arg1s
& (1 << (width
- 1)))
3933 arg1s
|= ((-1) << width
);
3941 /* Compute the value of the arithmetic. */
3946 val
= arg0
!= arg1
? STORE_FLAG_VALUE
: 0;
3950 val
= arg0
== arg1
? STORE_FLAG_VALUE
: 0;
3954 val
= arg0s
<= arg1s
? STORE_FLAG_VALUE
: 0;
3958 val
= arg0s
< arg1s
? STORE_FLAG_VALUE
: 0;
3962 val
= arg0s
>= arg1s
? STORE_FLAG_VALUE
: 0;
3966 val
= arg0s
> arg1s
? STORE_FLAG_VALUE
: 0;
3970 val
= ((unsigned) arg0
) <= ((unsigned) arg1
) ? STORE_FLAG_VALUE
: 0;
3974 val
= ((unsigned) arg0
) < ((unsigned) arg1
) ? STORE_FLAG_VALUE
: 0;
3978 val
= ((unsigned) arg0
) >= ((unsigned) arg1
) ? STORE_FLAG_VALUE
: 0;
3982 val
= ((unsigned) arg0
) > ((unsigned) arg1
) ? STORE_FLAG_VALUE
: 0;
3989 /* Clear the bits that don't belong in our mode, unless they and our sign
3990 bit are all one. So we get either a reasonable negative value or a
3991 reasonable unsigned value for this mode. */
3992 if (width
< HOST_BITS_PER_INT
3993 && ((val
& ((-1) << (width
- 1))) != ((-1) << (width
- 1))))
3994 val
&= (1 << width
) - 1;
3996 return gen_rtx (CONST_INT
, VOIDmode
, val
);
3999 /* Simplify CODE, an operation with result mode MODE and three operands,
4000 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4001 a constant. Return 0 if no simplifications is possible. */
4004 simplify_ternary_operation (code
, mode
, op0_mode
, op0
, op1
, op2
)
4006 enum machine_mode mode
, op0_mode
;
4009 int width
= GET_MODE_BITSIZE (mode
);
4011 /* VOIDmode means "infinite" precision. */
4013 width
= HOST_BITS_PER_INT
;
4019 if (GET_CODE (op0
) == CONST_INT
4020 && GET_CODE (op1
) == CONST_INT
4021 && GET_CODE (op2
) == CONST_INT
4022 && INTVAL (op1
) + INTVAL (op2
) <= GET_MODE_BITSIZE (op0_mode
)
4023 && width
<= HOST_BITS_PER_INT
)
4025 /* Extracting a bit-field from a constant */
4026 int val
= INTVAL (op0
);
4029 val
>>= (GET_MODE_BITSIZE (op0_mode
) - INTVAL (op2
) - INTVAL (op1
));
4031 val
>>= INTVAL (op2
);
4033 if (HOST_BITS_PER_INT
!= INTVAL (op1
))
4035 /* First zero-extend. */
4036 val
&= (1 << INTVAL (op1
)) - 1;
4037 /* If desired, propagate sign bit. */
4038 if (code
== SIGN_EXTRACT
&& (val
& (1 << (INTVAL (op1
) - 1))))
4039 val
|= ~ (1 << INTVAL (op1
));
4042 /* Clear the bits that don't belong in our mode,
4043 unless they and our sign bit are all one.
4044 So we get either a reasonable negative value or a reasonable
4045 unsigned value for this mode. */
4046 if (width
< HOST_BITS_PER_INT
4047 && ((val
& ((-1) << (width
- 1))) != ((-1) << (width
- 1))))
4048 val
&= (1 << width
) - 1;
4050 return gen_rtx (CONST_INT
, VOIDmode
, val
);
4055 if (GET_CODE (op0
) == CONST_INT
)
4056 return op0
!= const0_rtx
? op1
: op2
;
4066 /* If X is a nontrivial arithmetic operation on an argument
4067 for which a constant value can be determined, return
4068 the result of operating on that value, as a constant.
4069 Otherwise, return X, possibly with one or more operands
4070 modified by recursive calls to this function.
4072 If X is a register whose contents are known, we do NOT
4073 return those contents. This is because an instruction that
4074 uses a register is usually faster than one that uses a constant.
4076 INSN is the insn that we may be modifying. If it is 0, make a copy
4077 of X before modifying it. */
4084 register enum rtx_code code
;
4085 register enum machine_mode mode
;
4087 register int i
, val
;
4092 /* Folded equivalents of first two operands of X. */
4096 /* Constant equivalents of first three operands of X;
4097 0 when no such equivalent is known. */
4102 /* The mode of the first operand of X. We need this for sign and zero
4104 enum machine_mode mode_arg0
;
4109 mode
= GET_MODE (x
);
4110 code
= GET_CODE (x
);
4119 /* No use simplifying an EXPR_LIST
4120 since they are used only for lists of args
4121 in a function call's REG_EQUAL note. */
4127 return prev_insn_cc0
;
4131 /* If the next insn is a CODE_LABEL followed by a jump table,
4132 PC's value is a LABEL_REF pointing to that label. That
4133 lets us fold switch statements on the Vax. */
4134 if (insn
&& GET_CODE (insn
) == JUMP_INSN
)
4136 rtx next
= next_nonnote_insn (insn
);
4138 if (next
&& GET_CODE (next
) == CODE_LABEL
4139 && NEXT_INSN (next
) != 0
4140 && GET_CODE (NEXT_INSN (next
)) == JUMP_INSN
4141 && (GET_CODE (PATTERN (NEXT_INSN (next
))) == ADDR_VEC
4142 || GET_CODE (PATTERN (NEXT_INSN (next
))) == ADDR_DIFF_VEC
))
4143 return gen_rtx (LABEL_REF
, Pmode
, next
);
4148 /* If this is a single word of a multi-word value, see if we previously
4149 assigned a value to that word. */
4150 if (GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
4151 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) > UNITS_PER_WORD
4152 && (new = lookup_as_function (x
, CONST_INT
)) != 0)
4155 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4156 We might be able to if the SUBREG is extracting a single word in an
4157 integral mode or extracting the low part. */
4159 folded_arg0
= fold_rtx (SUBREG_REG (x
), insn
);
4160 const_arg0
= equiv_constant (folded_arg0
);
4162 folded_arg0
= const_arg0
;
4164 if (folded_arg0
!= SUBREG_REG (x
))
4168 if (GET_MODE_CLASS (mode
) == MODE_INT
4169 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
4170 && GET_MODE (SUBREG_REG (x
)) != VOIDmode
)
4171 new = operand_subword (folded_arg0
, SUBREG_WORD (x
), 0,
4172 GET_MODE (SUBREG_REG (x
)));
4173 if (new == 0 && subreg_lowpart_p (x
))
4174 new = gen_lowpart_if_possible (mode
, folded_arg0
);
4182 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4183 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4184 new = lookup_as_function (XEXP (x
, 0), code
);
4186 return fold_rtx (copy_rtx (XEXP (new, 0)), insn
);
4190 /* If we are not actually processing an insn, don't try to find the
4191 best address. Not only don't we care, but we could modify the
4192 MEM in an invalid way since we have no insn to validate against. */
4194 find_best_addr (insn
, &XEXP (x
, 0));
4197 /* Even if we don't fold in the insn itself,
4198 we can safely do so here, in hopes of getting a constant. */
4199 rtx addr
= fold_rtx (XEXP (x
, 0), 0);
4203 if (GET_CODE (addr
) == REG
4204 && REGNO_QTY_VALID_P (REGNO (addr
))
4205 && GET_MODE (addr
) == qty_mode
[reg_qty
[REGNO (addr
)]]
4206 && qty_const
[reg_qty
[REGNO (addr
)]] != 0)
4207 addr
= qty_const
[reg_qty
[REGNO (addr
)]];
4209 /* If address is constant, split it into a base and integer offset. */
4210 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
4212 else if (GET_CODE (addr
) == CONST
&& GET_CODE (XEXP (addr
, 0)) == PLUS
4213 && GET_CODE (XEXP (XEXP (addr
, 0), 1)) == CONST_INT
)
4215 base
= XEXP (XEXP (addr
, 0), 0);
4216 offset
= INTVAL (XEXP (XEXP (addr
, 0), 1));
4218 else if (GET_CODE (addr
) == LO_SUM
4219 && GET_CODE (XEXP (addr
, 1)) == SYMBOL_REF
)
4220 base
= XEXP (addr
, 1);
4222 /* If this is a constant pool reference, we can fold it into its
4223 constant to allow better value tracking. */
4224 if (base
&& GET_CODE (base
) == SYMBOL_REF
4225 && CONSTANT_POOL_ADDRESS_P (base
))
4227 rtx constant
= get_pool_constant (base
);
4228 enum machine_mode const_mode
= get_pool_mode (base
);
4231 if (CONSTANT_P (constant
) && GET_CODE (constant
) != CONST_INT
)
4232 constant_pool_entries_cost
= COST (constant
);
4234 /* If we are loading the full constant, we have an equivalence. */
4235 if (offset
== 0 && mode
== const_mode
)
4238 /* If this actually isn't a constant (wierd!), we can't do
4239 anything. Otherwise, handle the two most common cases:
4240 extracting a word from a multi-word constant, and extracting
4241 the low-order bits. Other cases don't seem common enough to
4243 if (! CONSTANT_P (constant
))
4246 if (GET_MODE_CLASS (mode
) == MODE_INT
4247 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
4248 && offset
% UNITS_PER_WORD
== 0
4249 && (new = operand_subword (constant
,
4250 offset
/ UNITS_PER_WORD
,
4251 0, const_mode
)) != 0)
4254 if (((BYTES_BIG_ENDIAN
4255 && offset
== GET_MODE_SIZE (GET_MODE (constant
)) - 1)
4256 || (! BYTES_BIG_ENDIAN
&& offset
== 0))
4257 && (new = gen_lowpart_if_possible (mode
, constant
)) != 0)
4261 /* If this is a reference to a label at a known position in a jump
4262 table, we also know its value. */
4263 if (base
&& GET_CODE (base
) == LABEL_REF
)
4265 rtx label
= XEXP (base
, 0);
4266 rtx table_insn
= NEXT_INSN (label
);
4268 if (table_insn
&& GET_CODE (table_insn
) == JUMP_INSN
4269 && GET_CODE (PATTERN (table_insn
)) == ADDR_VEC
)
4271 rtx table
= PATTERN (table_insn
);
4274 && (offset
/ GET_MODE_SIZE (GET_MODE (table
))
4275 < XVECLEN (table
, 0)))
4276 return XVECEXP (table
, 0,
4277 offset
/ GET_MODE_SIZE (GET_MODE (table
)));
4279 if (table_insn
&& GET_CODE (table_insn
) == JUMP_INSN
4280 && GET_CODE (PATTERN (table_insn
)) == ADDR_DIFF_VEC
)
4282 rtx table
= PATTERN (table_insn
);
4285 && (offset
/ GET_MODE_SIZE (GET_MODE (table
))
4286 < XVECLEN (table
, 1)))
4288 offset
/= GET_MODE_SIZE (GET_MODE (table
));
4289 new = gen_rtx (MINUS
, Pmode
, XVECEXP (table
, 1, offset
),
4292 if (GET_MODE (table
) != Pmode
)
4293 new = gen_rtx (TRUNCATE
, GET_MODE (table
), new);
4307 mode_arg0
= VOIDmode
;
4309 /* Try folding our operands.
4310 Then see which ones have constant values known. */
4312 fmt
= GET_RTX_FORMAT (code
);
4313 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4316 rtx arg
= XEXP (x
, i
);
4317 rtx folded_arg
= arg
, const_arg
= 0;
4318 enum machine_mode mode_arg
= GET_MODE (arg
);
4319 rtx cheap_arg
, expensive_arg
;
4320 rtx replacements
[2];
4323 /* Most arguments are cheap, so handle them specially. */
4324 switch (GET_CODE (arg
))
4327 /* This is the same as calling equiv_constant; it is duplicated
4329 if (REGNO_QTY_VALID_P (REGNO (arg
))
4330 && qty_const
[reg_qty
[REGNO (arg
)]] != 0
4331 && GET_CODE (qty_const
[reg_qty
[REGNO (arg
)]]) != REG
4332 && GET_CODE (qty_const
[reg_qty
[REGNO (arg
)]]) != PLUS
)
4334 = gen_lowpart_if_possible (GET_MODE (arg
),
4335 qty_const
[reg_qty
[REGNO (arg
)]]);
4348 folded_arg
= prev_insn_cc0
;
4349 mode_arg
= prev_insn_cc0_mode
;
4350 const_arg
= equiv_constant (folded_arg
);
4355 folded_arg
= fold_rtx (arg
, insn
);
4356 const_arg
= equiv_constant (folded_arg
);
4359 /* For the first three operands, see if the operand
4360 is constant or equivalent to a constant. */
4364 folded_arg0
= folded_arg
;
4365 const_arg0
= const_arg
;
4366 mode_arg0
= mode_arg
;
4369 folded_arg1
= folded_arg
;
4370 const_arg1
= const_arg
;
4373 const_arg2
= const_arg
;
4377 /* Pick the least expensive of the folded argument and an
4378 equivalent constant argument. */
4379 if (const_arg
== 0 || const_arg
== folded_arg
4380 || COST (const_arg
) > COST (folded_arg
))
4381 cheap_arg
= folded_arg
, expensive_arg
= const_arg
;
4383 cheap_arg
= const_arg
, expensive_arg
= folded_arg
;
4385 /* Try to replace the operand with the cheapest of the two
4386 possibilities. If it doesn't work and this is either of the first
4387 two operands of a commutative operation, try swapping them.
4388 If THAT fails, try the more expensive, provided it is cheaper
4389 than what is already there. */
4391 if (cheap_arg
== XEXP (x
, i
))
4394 if (insn
== 0 && ! copied
)
4400 replacements
[0] = cheap_arg
, replacements
[1] = expensive_arg
;
4402 j
< 2 && replacements
[j
]
4403 && COST (replacements
[j
]) < COST (XEXP (x
, i
));
4406 if (validate_change (insn
, &XEXP (x
, i
), replacements
[j
], 0))
4409 if (code
== NE
|| code
== EQ
|| GET_RTX_CLASS (code
) == 'c')
4411 validate_change (insn
, &XEXP (x
, i
), XEXP (x
, 1 - i
), 1);
4412 validate_change (insn
, &XEXP (x
, 1 - i
), replacements
[j
], 1);
4414 if (apply_change_group ())
4416 /* Swap them back to be invalid so that this loop can
4417 continue and flag them to be swapped back later. */
4420 tem
= XEXP (x
, 0); XEXP (x
, 0) = XEXP (x
, 1);
4429 else if (fmt
[i
] == 'E')
4430 /* Don't try to fold inside of a vector of expressions.
4431 Doing nothing is harmless. */
4434 /* If a commutative operation, place a constant integer as the second
4435 operand unless the first operand is also a constant integer. Otherwise,
4436 place any constant second unless the first operand is also a constant. */
4438 if (code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
4440 if (must_swap
|| (const_arg0
4442 || (GET_CODE (const_arg0
) == CONST_INT
4443 && GET_CODE (const_arg1
) != CONST_INT
))))
4445 register rtx tem
= XEXP (x
, 0);
4447 if (insn
== 0 && ! copied
)
4453 validate_change (insn
, &XEXP (x
, 0), XEXP (x
, 1), 1);
4454 validate_change (insn
, &XEXP (x
, 1), tem
, 1);
4455 if (apply_change_group ())
4457 tem
= const_arg0
, const_arg0
= const_arg1
, const_arg1
= tem
;
4458 tem
= folded_arg0
, folded_arg0
= folded_arg1
, folded_arg1
= tem
;
4463 /* If X is an arithmetic operation, see if we can simplify it. */
4465 switch (GET_RTX_CLASS (code
))
4468 new = simplify_unary_operation (code
, mode
,
4469 const_arg0
? const_arg0
: folded_arg0
,
4474 /* See what items are actually being compared and set FOLDED_ARG[01]
4475 to those values and CODE to the actual comparison code. If any are
4476 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
4477 do anything if both operands are already known to be constant. */
4479 if (const_arg0
== 0 || const_arg1
== 0)
4481 struct table_elt
*p0
, *p1
;
4483 code
= find_comparison_args (code
, &folded_arg0
, &folded_arg1
);
4484 const_arg0
= equiv_constant (folded_arg0
);
4485 const_arg1
= equiv_constant (folded_arg1
);
4487 /* Get a mode from the values actually being compared, or from the
4488 old value of MODE_ARG0 if both are constants. If the resulting
4489 mode is VOIDmode or a MODE_CC mode, we don't know what kinds
4490 of things are being compared, so we can't do anything with this
4493 if (GET_MODE (folded_arg0
) != VOIDmode
4494 && GET_MODE_CLASS (GET_MODE (folded_arg0
)) != MODE_CC
)
4495 mode_arg0
= GET_MODE (folded_arg0
);
4497 else if (GET_MODE (folded_arg1
) != VOIDmode
4498 && GET_MODE_CLASS (GET_MODE (folded_arg1
)) != MODE_CC
)
4499 mode_arg0
= GET_MODE (folded_arg1
);
4501 if (mode_arg0
== VOIDmode
|| GET_MODE_CLASS (mode_arg0
) == MODE_CC
)
4504 /* If we do not now have two constants being compared, see if we
4505 can nevertheless deduce some things about the comparison. */
4506 if (const_arg0
== 0 || const_arg1
== 0)
4508 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or non-explicit
4509 constant? These aren't zero, but we don't know their sign. */
4510 if (const_arg1
== const0_rtx
4511 && (NONZERO_BASE_PLUS_P (folded_arg0
)
4512 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
4514 || GET_CODE (folded_arg0
) == SYMBOL_REF
4516 || GET_CODE (folded_arg0
) == LABEL_REF
4517 || GET_CODE (folded_arg0
) == CONST
))
4521 else if (code
== NE
)
4522 return const_true_rtx
;
4525 /* See if the two operands are the same. We don't do this
4526 for IEEE floating-point since we can't assume x == x
4527 since x might be a NaN. */
4529 if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
4530 || GET_MODE_CLASS (mode_arg0
) != MODE_FLOAT
)
4531 && (folded_arg0
== folded_arg1
4532 || (GET_CODE (folded_arg0
) == REG
4533 && GET_CODE (folded_arg1
) == REG
4534 && (reg_qty
[REGNO (folded_arg0
)]
4535 == reg_qty
[REGNO (folded_arg1
)]))
4536 || ((p0
= lookup (folded_arg0
,
4537 (safe_hash (folded_arg0
, mode_arg0
)
4538 % NBUCKETS
), mode_arg0
))
4539 && (p1
= lookup (folded_arg1
,
4540 (safe_hash (folded_arg1
, mode_arg0
)
4541 % NBUCKETS
), mode_arg0
))
4542 && p0
->first_same_value
== p1
->first_same_value
)))
4543 return ((code
== EQ
|| code
== LE
|| code
== GE
4544 || code
== LEU
|| code
== GEU
)
4545 ? const_true_rtx
: const0_rtx
);
4547 /* If FOLDED_ARG0 is a register, see if the comparison we are
4548 doing now is either the same as we did before or the reverse
4549 (we only check the reverse if not floating-point). */
4550 else if (GET_CODE (folded_arg0
) == REG
)
4552 int qty
= reg_qty
[REGNO (folded_arg0
)];
4554 if (REGNO_QTY_VALID_P (REGNO (folded_arg0
))
4555 && (comparison_dominates_p (qty_comparison_code
[qty
], code
)
4556 || (comparison_dominates_p (qty_comparison_code
[qty
],
4557 reverse_condition (code
))
4558 && GET_MODE_CLASS (mode_arg0
) == MODE_INT
))
4559 && (rtx_equal_p (qty_comparison_const
[qty
], folded_arg1
)
4561 && rtx_equal_p (qty_comparison_const
[qty
],
4563 || (GET_CODE (folded_arg1
) == REG
4564 && (reg_qty
[REGNO (folded_arg1
)]
4565 == qty_comparison_qty
[qty
]))))
4566 return (comparison_dominates_p (qty_comparison_code
[qty
],
4568 ? const_true_rtx
: const0_rtx
);
4573 /* If we are comparing against zero, see if the first operand is
4574 equivalent to an IOR with a constant. If so, we may be able to
4575 determine the result of this comparison. */
4577 if (const_arg1
== const0_rtx
)
4579 rtx y
= lookup_as_function (folded_arg0
, IOR
);
4583 && (inner_const
= equiv_constant (XEXP (y
, 1))) != 0
4584 && GET_CODE (inner_const
) == CONST_INT
4585 && INTVAL (inner_const
) != 0)
4587 int sign_bitnum
= GET_MODE_BITSIZE (mode_arg0
) - 1;
4588 int has_sign
= (HOST_BITS_PER_INT
>= sign_bitnum
4589 && (INTVAL (inner_const
) & (1 << sign_bitnum
)));
4596 return const_true_rtx
;
4599 return const_true_rtx
;
4609 new = simplify_relational_operation (code
, mode_arg0
,
4610 const_arg0
? const_arg0
: folded_arg0
,
4611 const_arg1
? const_arg1
: folded_arg1
);
4619 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4620 with that LABEL_REF as its second operand. If so, the result is
4621 the first operand of that MINUS. This handles switches with an
4622 ADDR_DIFF_VEC table. */
4623 if (const_arg1
&& GET_CODE (const_arg1
) == LABEL_REF
)
4625 rtx y
= lookup_as_function (folded_arg0
, MINUS
);
4627 if (y
!= 0 && GET_CODE (XEXP (y
, 1)) == LABEL_REF
4628 && XEXP (XEXP (y
, 1), 0) == XEXP (const_arg1
, 0))
4632 /* ... fall through ... */
4635 case SMIN
: case SMAX
: case UMIN
: case UMAX
:
4636 case IOR
: case AND
: case XOR
:
4637 case MULT
: case DIV
: case UDIV
:
4638 case ASHIFT
: case LSHIFTRT
: case ASHIFTRT
:
4639 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4640 is known to be of similar form, we may be able to replace the
4641 operation with a combined operation. This may eliminate the
4642 intermediate operation if every use is simplified in this way.
4643 Note that the similar optimization done by combine.c only works
4644 if the intermediate operation's result has only one reference. */
4646 if (GET_CODE (folded_arg0
) == REG
4647 && const_arg1
&& GET_CODE (const_arg1
) == CONST_INT
)
4650 = (code
== ASHIFT
|| code
== ASHIFTRT
|| code
== LSHIFTRT
);
4651 rtx y
= lookup_as_function (folded_arg0
, code
);
4653 enum rtx_code associate_code
;
4657 || 0 == (inner_const
4658 = equiv_constant (fold_rtx (XEXP (y
, 1), 0)))
4659 || GET_CODE (inner_const
) != CONST_INT
4660 /* If we have compiled a statement like
4661 "if (x == (x & mask1))", and now are looking at
4662 "x & mask2", we will have a case where the first operand
4663 of Y is the same as our first operand. Unless we detect
4664 this case, an infinite loop will result. */
4665 || XEXP (y
, 0) == folded_arg0
)
4668 /* Don't associate these operations if they are a PLUS with the
4669 same constant and it is a power of two. These might be doable
4670 with a pre- or post-increment. Similarly for two subtracts of
4671 identical powers of two with post decrement. */
4673 if (code
== PLUS
&& INTVAL (const_arg1
) == INTVAL (inner_const
)
4675 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
4676 || exact_log2 (INTVAL (const_arg1
)) >= 0
4678 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
4679 || exact_log2 (- INTVAL (const_arg1
)) >= 0
4684 /* Compute the code used to compose the constants. For example,
4685 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
4688 = (code
== MULT
|| code
== DIV
|| code
== UDIV
? MULT
4689 : is_shift
|| code
== PLUS
|| code
== MINUS
? PLUS
: code
);
4691 new_const
= simplify_binary_operation (associate_code
, mode
,
4692 const_arg1
, inner_const
);
4697 /* If we are associating shift operations, don't let this
4698 produce a shift of larger than the object. This could
4699 occur when we following a sign-extend by a right shift on
4700 a machine that does a sign-extend as a pair of shifts. */
4702 if (is_shift
&& GET_CODE (new_const
) == CONST_INT
4703 && INTVAL (new_const
) > GET_MODE_BITSIZE (mode
))
4706 y
= copy_rtx (XEXP (y
, 0));
4708 /* If Y contains our first operand (the most common way this
4709 can happen is if Y is a MEM), we would do into an infinite
4710 loop if we tried to fold it. So don't in that case. */
4712 if (! reg_mentioned_p (folded_arg0
, y
))
4713 y
= fold_rtx (y
, insn
);
4715 new = simplify_binary_operation (code
, mode
, y
, new_const
);
4719 return gen_rtx (code
, mode
, y
, new_const
);
4723 new = simplify_binary_operation (code
, mode
,
4724 const_arg0
? const_arg0
: folded_arg0
,
4725 const_arg1
? const_arg1
: folded_arg1
);
4729 /* (lo_sum (high X) X) is simply X. */
4730 if (code
== LO_SUM
&& const_arg0
!= 0
4731 && GET_CODE (const_arg0
) == HIGH
4732 && rtx_equal_p (XEXP (const_arg0
, 0), const_arg1
))
4738 new = simplify_ternary_operation (code
, mode
, mode_arg0
,
4739 const_arg0
? const_arg0
: folded_arg0
,
4740 const_arg1
? const_arg1
: folded_arg1
,
4741 const_arg2
? const_arg2
: XEXP (x
, 2));
4745 return new ? new : x
;
4748 /* Return a constant value currently equivalent to X.
4749 Return 0 if we don't know one. */
4755 if (GET_CODE (x
) == REG
4756 && REGNO_QTY_VALID_P (REGNO (x
))
4757 && qty_const
[reg_qty
[REGNO (x
)]])
4758 x
= gen_lowpart_if_possible (GET_MODE (x
), qty_const
[reg_qty
[REGNO (x
)]]);
4760 if (x
!= 0 && CONSTANT_P (x
))
4766 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4767 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4768 least-significant part of X.
4769 MODE specifies how big a part of X to return.
4771 If the requested operation cannot be done, 0 is returned.
4773 This is similar to gen_lowpart in emit-rtl.c. */
4776 gen_lowpart_if_possible (mode
, x
)
4777 enum machine_mode mode
;
4780 rtx result
= gen_lowpart_common (mode
, x
);
4784 else if (GET_CODE (x
) == MEM
)
4786 /* This is the only other case we handle. */
4787 register int offset
= 0;
4790 #if WORDS_BIG_ENDIAN
4791 offset
= (MAX (GET_MODE_SIZE (GET_MODE (x
)), UNITS_PER_WORD
)
4792 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
));
4794 #if BYTES_BIG_ENDIAN
4795 /* Adjust the address so that the address-after-the-data
4797 offset
-= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
))
4798 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (x
))));
4800 new = gen_rtx (MEM
, mode
, plus_constant (XEXP (x
, 0), offset
));
4801 if (! memory_address_p (mode
, XEXP (new, 0)))
4803 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x
);
4804 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x
);
4805 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x
);
4812 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4813 branch. It will be zero if not.
4815 In certain cases, this can cause us to add an equivalence. For example,
4816 if we are following the taken case of
4818 we can add the fact that `i' and '2' are now equivalent.
4820 In any case, we can record that this comparison was passed. If the same
4821 comparison is seen later, we will know its value. */
4824 record_jump_equiv (insn
, taken
)
4828 int cond_known_true
;
4830 enum machine_mode mode
;
4831 int reversed_nonequality
= 0;
4834 /* Ensure this is the right kind of insn. */
4835 if (! condjump_p (insn
) || simplejump_p (insn
))
4838 /* See if this jump condition is known true or false. */
4840 cond_known_true
= (XEXP (SET_SRC (PATTERN (insn
)), 2) == pc_rtx
);
4842 cond_known_true
= (XEXP (SET_SRC (PATTERN (insn
)), 1) == pc_rtx
);
4844 /* Get the type of comparison being done and the operands being compared.
4845 If we had to reverse a non-equality condition, record that fact so we
4846 know that it isn't valid for floating-point. */
4847 code
= GET_CODE (XEXP (SET_SRC (PATTERN (insn
)), 0));
4848 op0
= fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn
)), 0), 0), insn
);
4849 op1
= fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn
)), 0), 1), insn
);
4851 code
= find_comparison_args (code
, &op0
, &op1
);
4852 if (! cond_known_true
)
4854 reversed_nonequality
= (code
!= EQ
&& code
!= NE
);
4855 code
= reverse_condition (code
);
4858 /* The mode is the mode of the non-constant. */
4859 mode
= GET_MODE (op0
);
4860 if (mode
== VOIDmode
) mode
= GET_MODE (op1
);
4862 record_jump_cond (code
, mode
, op0
, op1
, reversed_nonequality
);
4865 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4866 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4867 Make any useful entries we can with that information. Called from
4868 above function and called recursively. */
4871 record_jump_cond (code
, mode
, op0
, op1
, reversed_nonequality
)
4873 enum machine_mode mode
;
4875 int reversed_nonequality
;
4877 int op0_hash_code
, op1_hash_code
;
4878 int op0_in_memory
, op0_in_struct
, op1_in_memory
, op1_in_struct
;
4879 struct table_elt
*op0_elt
, *op1_elt
;
4881 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4882 we know that they are also equal in the smaller mode (this is also
4883 true for all smaller modes whether or not there is a SUBREG, but
4884 is not worth testing for with no SUBREG. */
4886 if (code
== EQ
&& GET_CODE (op0
) == SUBREG
4887 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0
))))
4889 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (op0
));
4890 rtx tem
= gen_lowpart_if_possible (inner_mode
, op1
);
4892 record_jump_cond (code
, mode
, SUBREG_REG (op0
),
4893 tem
? tem
: gen_rtx (SUBREG
, inner_mode
, op1
, 0),
4894 reversed_nonequality
);
4897 if (code
== EQ
&& GET_CODE (op1
) == SUBREG
4898 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1
))))
4900 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (op1
));
4901 rtx tem
= gen_lowpart_if_possible (inner_mode
, op0
);
4903 record_jump_cond (code
, mode
, SUBREG_REG (op1
),
4904 tem
? tem
: gen_rtx (SUBREG
, inner_mode
, op0
, 0),
4905 reversed_nonequality
);
4908 /* Similarly, if this is an NE comparison, and either is a SUBREG
4909 making a smaller mode, we know the whole thing is also NE. */
4911 if (code
== NE
&& GET_CODE (op0
) == SUBREG
4912 && subreg_lowpart_p (op0
)
4913 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0
))))
4915 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (op0
));
4916 rtx tem
= gen_lowpart_if_possible (inner_mode
, op1
);
4918 record_jump_cond (code
, mode
, SUBREG_REG (op0
),
4919 tem
? tem
: gen_rtx (SUBREG
, inner_mode
, op1
, 0),
4920 reversed_nonequality
);
4923 if (code
== NE
&& GET_CODE (op1
) == SUBREG
4924 && subreg_lowpart_p (op1
)
4925 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1
))))
4927 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (op1
));
4928 rtx tem
= gen_lowpart_if_possible (inner_mode
, op0
);
4930 record_jump_cond (code
, mode
, SUBREG_REG (op1
),
4931 tem
? tem
: gen_rtx (SUBREG
, inner_mode
, op0
, 0),
4932 reversed_nonequality
);
4935 /* Hash both operands. */
4938 hash_arg_in_memory
= 0;
4939 hash_arg_in_struct
= 0;
4940 op0_hash_code
= HASH (op0
, mode
);
4941 op0_in_memory
= hash_arg_in_memory
;
4942 op0_in_struct
= hash_arg_in_struct
;
4948 hash_arg_in_memory
= 0;
4949 hash_arg_in_struct
= 0;
4950 op1_hash_code
= HASH (op1
, mode
);
4951 op1_in_memory
= hash_arg_in_memory
;
4952 op1_in_struct
= hash_arg_in_struct
;
4957 /* Look up both operands. */
4958 op0_elt
= lookup (op0
, op0_hash_code
, mode
);
4959 op1_elt
= lookup (op1
, op1_hash_code
, mode
);
4961 /* If we aren't setting two things equal all we can do is save this
4965 /* If we reversed a floating-point comparison, if OP0 is not a
4966 register, or if OP1 is neither a register or constant, we can't
4969 if (GET_CODE (op1
) != REG
)
4970 op1
= equiv_constant (op1
);
4972 if ((reversed_nonequality
&& GET_MODE_CLASS (mode
) != MODE_INT
)
4973 || GET_CODE (op0
) != REG
|| op1
== 0)
4976 /* Put OP0 in the hash table if it isn't already. This gives it a
4977 new quantity number. */
4980 if (insert_regs (op0
, 0, 0))
4982 rehash_using_reg (op0
);
4983 op0_hash_code
= HASH (op0
, mode
);
4986 op0_elt
= insert (op0
, 0, op0_hash_code
, mode
);
4987 op0_elt
->in_memory
= op0_in_memory
;
4988 op0_elt
->in_struct
= op0_in_struct
;
4991 qty_comparison_code
[reg_qty
[REGNO (op0
)]] = code
;
4992 if (GET_CODE (op1
) == REG
)
4994 /* Put OP1 in the hash table so it gets a new quantity number. */
4997 if (insert_regs (op1
, 0, 0))
4999 rehash_using_reg (op1
);
5000 op1_hash_code
= HASH (op1
, mode
);
5003 op1_elt
= insert (op1
, 0, op1_hash_code
, mode
);
5004 op1_elt
->in_memory
= op1_in_memory
;
5005 op1_elt
->in_struct
= op1_in_struct
;
5008 qty_comparison_qty
[reg_qty
[REGNO (op0
)]] = reg_qty
[REGNO (op1
)];
5009 qty_comparison_const
[reg_qty
[REGNO (op0
)]] = 0;
5013 qty_comparison_qty
[reg_qty
[REGNO (op0
)]] = -1;
5014 qty_comparison_const
[reg_qty
[REGNO (op0
)]] = op1
;
5020 /* If both are equivalent, merge the two classes. Save this class for
5021 `cse_set_around_loop'. */
5022 if (op0_elt
&& op1_elt
)
5024 merge_equiv_classes (op0_elt
, op1_elt
);
5025 last_jump_equiv_class
= op0_elt
;
5028 /* For whichever side doesn't have an equivalence, make one. */
5031 if (insert_regs (op0
, op1_elt
, 0))
5033 rehash_using_reg (op0
);
5034 op0_hash_code
= HASH (op0
, mode
);
5037 op0_elt
= insert (op0
, op1_elt
, op0_hash_code
, mode
);
5038 op0_elt
->in_memory
= op0_in_memory
;
5039 op0_elt
->in_struct
= op0_in_struct
;
5040 last_jump_equiv_class
= op0_elt
;
5045 if (insert_regs (op1
, op0_elt
, 0))
5047 rehash_using_reg (op1
);
5048 op1_hash_code
= HASH (op1
, mode
);
5051 op1_elt
= insert (op1
, op0_elt
, op1_hash_code
, mode
);
5052 op1_elt
->in_memory
= op1_in_memory
;
5053 op1_elt
->in_struct
= op1_in_struct
;
5054 last_jump_equiv_class
= op1_elt
;
5058 /* CSE processing for one instruction.
5059 First simplify sources and addresses of all assignments
5060 in the instruction, using previously-computed equivalent values.
5061 Then install the new sources and destinations in the table
5062 of available values.
5064 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
/* NOTE(review): this extracted span appears to have lost the enclosing
   `struct set { ... };' braces and several member declarations that the
   surviving comments describe (the SET rtx `rtl', the original `src',
   `src_hash_code', `dest_hash_code', the stripped `inner_dest', the
   `src_in_memory'/`src_in_struct'/`src_volatile' flags, and `src_const').
   Restore those lines from the original cse.c before compiling; only the
   declarations below survived extraction.  */
5067 /* Data on one SET contained in the instruction. */
5071 /* The SET rtx itself. */
5073 /* The SET_SRC of the rtx (the original value, if it is changing). */
5075 /* The hash-table element for the SET_SRC of the SET. */
5076 struct table_elt
*src_elt
;
5077 /* Hash code for the SET_SRC. */
5079 /* Hash code for the SET_DEST. */
5081 /* The SET_DEST, with SUBREG, etc., stripped. */
5083 /* Place where the pointer to the INNER_DEST was found. */
5084 rtx
*inner_dest_loc
;
5085 /* Nonzero if the SET_SRC is in memory. */
5087 /* Nonzero if the SET_SRC is in a structure. */
5089 /* Nonzero if the SET_SRC contains something
5090 whose value cannot be predicted and understood. */
5092 /* Original machine mode, in case it becomes a CONST_INT. */
5093 enum machine_mode mode
;
5094 /* A constant equivalent for SET_SRC, if any. */
5096 /* Hash code of constant equivalent for SET_SRC. */
5097 int src_const_hash_code
;
5098 /* Table entry for constant equivalent for SET_SRC, if any. */
5099 struct table_elt
*src_const_elt
;
5103 cse_insn (insn
, in_libcall_block
)
5105 int in_libcall_block
;
5107 register rtx x
= PATTERN (insn
);
5110 register int n_sets
= 0;
5112 /* Records what this insn does to set CC0. */
5113 rtx this_insn_cc0
= 0;
5114 enum machine_mode this_insn_cc0_mode
;
5115 struct write_data writes_memory
;
5116 static struct write_data init
= {0, 0, 0, 0};
5119 struct table_elt
*src_eqv_elt
= 0;
5120 int src_eqv_volatile
;
5121 int src_eqv_in_memory
;
5122 int src_eqv_in_struct
;
5123 int src_eqv_hash_code
;
5128 writes_memory
= init
;
5130 /* Find all the SETs and CLOBBERs in this instruction.
5131 Record all the SETs in the array `set' and count them.
5132 Also determine whether there is a CLOBBER that invalidates
5133 all memory references, or all references at varying addresses. */
5135 if (GET_CODE (x
) == SET
)
5137 sets
= (struct set
*) alloca (sizeof (struct set
));
5140 /* Ignore SETs that are unconditional jumps.
5141 They never need cse processing, so this does not hurt.
5142 The reason is not efficiency but rather
5143 so that we can test at the end for instructions
5144 that have been simplified to unconditional jumps
5145 and not be misled by unchanged instructions
5146 that were unconditional jumps to begin with. */
5147 if (SET_DEST (x
) == pc_rtx
5148 && GET_CODE (SET_SRC (x
)) == LABEL_REF
)
5151 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
5152 The hard function value register is used only once, to copy to
5153 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
5154 Ensure we invalidate the destination register. On the 80386 no
5155 other code would invalidate it since it is a fixed_reg. */
5157 else if (GET_CODE (SET_SRC (x
)) == CALL
)
5159 canon_reg (SET_SRC (x
), insn
);
5160 fold_rtx (SET_SRC (x
), insn
);
5161 invalidate (SET_DEST (x
));
5166 else if (GET_CODE (x
) == PARALLEL
)
5168 register int lim
= XVECLEN (x
, 0);
5170 sets
= (struct set
*) alloca (lim
* sizeof (struct set
));
5172 /* Find all regs explicitly clobbered in this insn,
5173 and ensure they are not replaced with any other regs
5174 elsewhere in this insn.
5175 When a reg that is clobbered is also used for input,
5176 we should presume that that is for a reason,
5177 and we should not substitute some other register
5178 which is not supposed to be clobbered.
5179 Therefore, this loop cannot be merged into the one below
5180 because a CALL may precede a CLOBBER and refer to the
5181 value clobbered. We must not let a canonicalization do
5182 anything in that case. */
5183 for (i
= 0; i
< lim
; i
++)
5185 register rtx y
= XVECEXP (x
, 0, i
);
5186 if (GET_CODE (y
) == CLOBBER
&& GET_CODE (XEXP (y
, 0)) == REG
)
5187 invalidate (XEXP (y
, 0));
5190 for (i
= 0; i
< lim
; i
++)
5192 register rtx y
= XVECEXP (x
, 0, i
);
5193 if (GET_CODE (y
) == SET
)
5195 /* As above, we ignore unconditional jumps and call-insns. */
5196 if (GET_CODE (SET_SRC (y
)) == CALL
)
5198 canon_reg (SET_SRC (y
), insn
);
5199 fold_rtx (SET_SRC (y
), insn
);
5200 invalidate (SET_DEST (y
));
5202 else if (SET_DEST (y
) == pc_rtx
5203 && GET_CODE (SET_SRC (y
)) == LABEL_REF
)
5206 sets
[n_sets
++].rtl
= y
;
5208 else if (GET_CODE (y
) == CLOBBER
)
5210 /* If we clobber memory, take note of that,
5211 and canon the address.
5212 This does nothing when a register is clobbered
5213 because we have already invalidated the reg. */
5214 if (GET_CODE (XEXP (y
, 0)) == MEM
)
5216 canon_reg (XEXP (y
, 0), 0);
5217 note_mem_written (XEXP (y
, 0), &writes_memory
);
5220 else if (GET_CODE (y
) == USE
5221 && ! (GET_CODE (XEXP (y
, 0)) == REG
5222 && REGNO (XEXP (y
, 0)) < FIRST_PSEUDO_REGISTER
))
5224 else if (GET_CODE (y
) == CALL
)
5226 canon_reg (y
, insn
);
5231 else if (GET_CODE (x
) == CLOBBER
)
5233 if (GET_CODE (XEXP (x
, 0)) == MEM
)
5235 canon_reg (XEXP (x
, 0), 0);
5236 note_mem_written (XEXP (x
, 0), &writes_memory
);
5240 /* Canonicalize a USE of a pseudo register or memory location. */
5241 else if (GET_CODE (x
) == USE
5242 && ! (GET_CODE (XEXP (x
, 0)) == REG
5243 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
))
5244 canon_reg (XEXP (x
, 0), 0);
5245 else if (GET_CODE (x
) == CALL
)
5247 canon_reg (x
, insn
);
5251 if (n_sets
== 1 && REG_NOTES (insn
) != 0)
5253 /* Store the equivalent value in SRC_EQV, if different. */
5254 rtx tem
= find_reg_note (insn
, REG_EQUAL
, 0);
5256 if (tem
&& ! rtx_equal_p (XEXP (tem
, 0), SET_SRC (sets
[0].rtl
)))
5257 src_eqv
= canon_reg (XEXP (tem
, 0), 0);
5260 /* Canonicalize sources and addresses of destinations.
5261 We do this in a separate pass to avoid problems when a MATCH_DUP is
5262 present in the insn pattern. In that case, we want to ensure that
5263 we don't break the duplicate nature of the pattern. So we will replace
5264 both operands at the same time. Otherwise, we would fail to find an
5265 equivalent substitution in the loop calling validate_change below.
5266 (We also speed up that loop when a canonicalization was done since
5267 recog_memoized need not be called for just a canonicalization unless
5268 a pseudo register is being replaced by a hard reg or vice versa.)
5270 We used to suppress canonicalization of DEST if it appears in SRC,
5271 but we don't do this any more.
5273 ??? The way this code is written now, if we have a MATCH_DUP between
5274 two operands that are pseudos and we would want to canonicalize them
5275 to a hard register, we won't do that. The only time this would happen
5276 is if the hard reg was a fixed register, and this should be rare.
5278 ??? This won't work if there is a MATCH_DUP between an input and an
5279 output, but these never worked and must be declared invalid. */
5281 for (i
= 0; i
< n_sets
; i
++)
5283 rtx dest
= SET_DEST (sets
[i
].rtl
);
5284 rtx src
= SET_SRC (sets
[i
].rtl
);
5285 rtx
new = canon_reg (src
, insn
);
5287 if (GET_CODE (new) == REG
&& GET_CODE (src
) == REG
5288 && ((REGNO (new) < FIRST_PSEUDO_REGISTER
)
5289 != (REGNO (src
) < FIRST_PSEUDO_REGISTER
)))
5290 validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new, 0);
5292 SET_SRC (sets
[i
].rtl
) = new;
5294 if (GET_CODE (dest
) == ZERO_EXTRACT
|| GET_CODE (dest
) == SIGN_EXTRACT
)
5296 validate_change (insn
, &XEXP (dest
, 1),
5297 canon_reg (XEXP (dest
, 1), insn
), 0);
5298 validate_change (insn
, &XEXP (dest
, 2),
5299 canon_reg (XEXP (dest
, 2), insn
), 0);
5302 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
5303 || GET_CODE (dest
) == ZERO_EXTRACT
5304 || GET_CODE (dest
) == SIGN_EXTRACT
)
5305 dest
= XEXP (dest
, 0);
5307 if (GET_CODE (dest
) == MEM
)
5308 canon_reg (dest
, insn
);
5311 /* Set sets[i].src_elt to the class each source belongs to.
5312 Detect assignments from or to volatile things
5313 and set set[i] to zero so they will be ignored
5314 in the rest of this function.
5316 Nothing in this loop changes the hash table or the register chains. */
5318 for (i
= 0; i
< n_sets
; i
++)
5320 register rtx src
, dest
;
5321 register rtx src_folded
;
5322 register struct table_elt
*elt
= 0, *p
;
5323 enum machine_mode mode
;
5326 rtx src_related
= 0;
5327 struct table_elt
*src_const_elt
= 0;
5328 int src_cost
= 10000, src_eqv_cost
= 10000, src_folded_cost
= 10000;
5329 int src_related_cost
= 10000, src_elt_cost
= 10000;
5330 /* Set non-zero if we need to call force_const_mem with the
5331 contents of src_folded before using it. */
5332 int src_folded_force_flag
= 0;
5334 dest
= SET_DEST (sets
[i
].rtl
);
5335 src
= SET_SRC (sets
[i
].rtl
);
5337 /* If SRC is a constant that has no machine mode,
5338 hash it with the destination's machine mode.
5339 This way we can keep different modes separate. */
5341 mode
= GET_MODE (src
) == VOIDmode
? GET_MODE (dest
) : GET_MODE (src
);
5342 sets
[i
].mode
= mode
;
5346 enum machine_mode eqvmode
= mode
;
5347 if (GET_CODE (dest
) == STRICT_LOW_PART
)
5348 eqvmode
= GET_MODE (SUBREG_REG (XEXP (dest
, 0)));
5350 hash_arg_in_memory
= 0;
5351 hash_arg_in_struct
= 0;
5352 src_eqv
= fold_rtx (src_eqv
, insn
);
5353 src_eqv_hash_code
= HASH (src_eqv
, eqvmode
);
5355 /* Find the equivalence class for the equivalent expression. */
5358 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash_code
, eqvmode
);
5360 src_eqv_volatile
= do_not_record
;
5361 src_eqv_in_memory
= hash_arg_in_memory
;
5362 src_eqv_in_struct
= hash_arg_in_struct
;
5365 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5366 value of the INNER register, not the destination. So it is not
5367 a legal substitution for the source. But save it for later. */
5368 if (GET_CODE (dest
) == STRICT_LOW_PART
)
5371 src_eqv_here
= src_eqv
;
5373 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5374 simplified result, which may not necessarily be valid. */
5375 src_folded
= fold_rtx (src
, insn
);
5377 /* If storing a constant in a bitfield, pre-truncate the constant
5378 so we will be able to record it later. */
5379 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
5380 || GET_CODE (SET_DEST (sets
[i
].rtl
)) == SIGN_EXTRACT
)
5382 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
5384 if (GET_CODE (src
) == CONST_INT
5385 && GET_CODE (width
) == CONST_INT
5386 && INTVAL (width
) < HOST_BITS_PER_INT
5387 && (INTVAL (src
) & ((-1) << INTVAL (width
))))
5388 src_folded
= gen_rtx (CONST_INT
, VOIDmode
,
5389 INTVAL (src
) & ((1 << INTVAL (width
)) - 1));
5392 /* Compute SRC's hash code, and also notice if it
5393 should not be recorded at all. In that case,
5394 prevent any further processing of this assignment. */
5396 hash_arg_in_memory
= 0;
5397 hash_arg_in_struct
= 0;
5400 sets
[i
].src_hash_code
= HASH (src
, mode
);
5401 sets
[i
].src_volatile
= do_not_record
;
5402 sets
[i
].src_in_memory
= hash_arg_in_memory
;
5403 sets
[i
].src_in_struct
= hash_arg_in_struct
;
5405 /* If source is a perverse subreg (such as QI treated as an SI),
5406 treat it as volatile. It may do the work of an SI in one context
5407 where the extra bits are not being used, but cannot replace an SI
5409 if (GET_CODE (src
) == SUBREG
5410 && (GET_MODE_SIZE (GET_MODE (src
))
5411 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src
)))))
5412 sets
[i
].src_volatile
= 1;
5414 /* Locate all possible equivalent forms for SRC. Try to replace
5415 SRC in the insn with each cheaper equivalent.
5417 We have the following types of equivalents: SRC itself, a folded
5418 version, a value given in a REG_EQUAL note, or a value related
5421 Each of these equivalents may be part of an additional class
5422 of equivalents (if more than one is in the table, they must be in
5423 the same class; we check for this).
5425 If the source is volatile, we don't do any table lookups.
5427 We note any constant equivalent for possible later use in a
5430 if (!sets
[i
].src_volatile
)
5431 elt
= lookup (src
, sets
[i
].src_hash_code
, mode
);
5433 sets
[i
].src_elt
= elt
;
5435 if (elt
&& src_eqv_here
&& src_eqv_elt
)
5437 if (elt
->first_same_value
!= src_eqv_elt
->first_same_value
)
5439 /* The REG_EQUAL is indicating that two formerly distinct
5440 classes are now equivalent. So merge them. */
5441 merge_equiv_classes (elt
, src_eqv_elt
);
5442 src_eqv_hash_code
= HASH (src_eqv
, elt
->mode
);
5443 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash_code
, elt
->mode
);
5449 else if (src_eqv_elt
)
5452 /* Try to find a constant somewhere and record it in `src_const'.
5453 Record its table element, if any, in `src_const_elt'. Look in
5454 any known equivalences first. (If the constant is not in the
5455 table, also set `sets[i].src_const_hash_code'). */
5457 for (p
= elt
->first_same_value
; p
; p
= p
->next_same_value
)
5461 src_const_elt
= elt
;
5466 && (CONSTANT_P (src_folded
)
5467 /* Consider (minus (label_ref L1) (label_ref L2)) as
5468 "constant" here so we will record it. This allows us
5469 to fold switch statements when an ADDR_DIFF_VEC is used. */
5470 || (GET_CODE (src_folded
) == MINUS
5471 && GET_CODE (XEXP (src_folded
, 0)) == LABEL_REF
5472 && GET_CODE (XEXP (src_folded
, 1)) == LABEL_REF
)))
5473 src_const
= src_folded
, src_const_elt
= elt
;
5474 else if (src_const
== 0 && src_eqv_here
&& CONSTANT_P (src_eqv_here
))
5475 src_const
= src_eqv_here
, src_const_elt
= src_eqv_elt
;
5477 /* If we don't know if the constant is in the table, get its
5478 hash code and look it up. */
5479 if (src_const
&& src_const_elt
== 0)
5481 sets
[i
].src_const_hash_code
= HASH (src_const
, mode
);
5482 src_const_elt
= lookup (src_const
, sets
[i
].src_const_hash_code
,
5486 sets
[i
].src_const
= src_const
;
5487 sets
[i
].src_const_elt
= src_const_elt
;
5489 /* If the constant and our source are both in the table, mark them as
5490 equivalent. Otherwise, if a constant is in the table but the source
5491 isn't, set ELT to it. */
5492 if (src_const_elt
&& elt
5493 && src_const_elt
->first_same_value
!= elt
->first_same_value
)
5494 merge_equiv_classes (elt
, src_const_elt
);
5495 else if (src_const_elt
&& elt
== 0)
5496 elt
= src_const_elt
;
5498 /* See if there is a register linearly related to a constant
5499 equivalent of SRC. */
5501 && (GET_CODE (src_const
) == CONST
5502 || (src_const_elt
&& src_const_elt
->related_value
!= 0)))
5504 src_related
= use_related_value (src_const
, src_const_elt
);
5507 struct table_elt
*src_related_elt
5508 = lookup (src_related
, HASH (src_related
, mode
), mode
);
5509 if (src_related_elt
&& elt
)
5511 if (elt
->first_same_value
5512 != src_related_elt
->first_same_value
)
5513 /* This can occur when we previously saw a CONST
5514 involving a SYMBOL_REF and then see the SYMBOL_REF
5515 twice. Merge the involved classes. */
5516 merge_equiv_classes (elt
, src_related_elt
);
5519 src_related_elt
= 0;
5521 else if (src_related_elt
&& elt
== 0)
5522 elt
= src_related_elt
;
5526 /* Another possibility is that we have an AND with a constant in
5527 a mode narrower than a word. If so, it might have been generated
5528 as part of an "if" which would narrow the AND. If we already
5529 have done the AND in a wider mode, we can use a SUBREG of that
5532 if (flag_expensive_optimizations
&& ! src_related
5533 && GET_CODE (src
) == AND
&& GET_CODE (XEXP (src
, 1)) == CONST_INT
5534 && GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
5536 enum machine_mode tmode
;
5537 rtx new_and
= gen_rtx (AND
, VOIDmode
, 0, XEXP (src
, 1));
5539 for (tmode
= GET_MODE_WIDER_MODE (mode
);
5540 GET_MODE_SIZE (tmode
) <= UNITS_PER_WORD
;
5541 tmode
= GET_MODE_WIDER_MODE (tmode
))
5543 rtx inner
= gen_lowpart_if_possible (tmode
, XEXP (src
, 0));
5544 struct table_elt
*larger_elt
;
5548 PUT_MODE (new_and
, tmode
);
5549 XEXP (new_and
, 0) = inner
;
5550 larger_elt
= lookup (new_and
, HASH (new_and
, tmode
), tmode
);
5551 if (larger_elt
== 0)
5554 for (larger_elt
= larger_elt
->first_same_value
;
5555 larger_elt
; larger_elt
= larger_elt
->next_same_value
)
5556 if (GET_CODE (larger_elt
->exp
) == REG
)
5559 = gen_lowpart_if_possible (mode
, larger_elt
->exp
);
5569 if (src
== src_folded
)
5572 /* At this point, ELT, if non-zero, points to a class of expressions
5573 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5574 and SRC_RELATED, if non-zero, each contain additional equivalent
5575 expressions. Prune these latter expressions by deleting expressions
5576 already in the equivalence class.
5578 Check for an equivalent identical to the destination. If found,
5579 this is the preferred equivalent since it will likely lead to
5580 elimination of the insn. Indicate this by placing it in
5583 if (elt
) elt
= elt
->first_same_value
;
5584 for (p
= elt
; p
; p
= p
->next_same_value
)
5586 enum rtx_code code
= GET_CODE (p
->exp
);
5588 /* If the expression is not valid, ignore it. Then we do not
5589 have to check for validity below. In most cases, we can use
5590 `rtx_equal_p', since canonicalization has already been done. */
5591 if (code
!= REG
&& ! exp_equiv_p (p
->exp
, p
->exp
, 1, 0))
5594 if (src
&& GET_CODE (src
) == code
&& rtx_equal_p (src
, p
->exp
))
5596 else if (src_folded
&& GET_CODE (src_folded
) == code
5597 && rtx_equal_p (src_folded
, p
->exp
))
5599 else if (src_eqv_here
&& GET_CODE (src_eqv_here
) == code
5600 && rtx_equal_p (src_eqv_here
, p
->exp
))
5602 else if (src_related
&& GET_CODE (src_related
) == code
5603 && rtx_equal_p (src_related
, p
->exp
))
5606 /* This is the same as the destination of the insns, we want
5607 to prefer it. Copy it to src_related. The code below will
5608 then give it a negative cost. */
5609 if (GET_CODE (dest
) == code
&& rtx_equal_p (p
->exp
, dest
))
5614 /* Find the cheapest valid equivalent, trying all the available
5615 possibilities. Prefer items not in the hash table to ones
5616 that are when they are equal cost. Note that we can never
5617 worsen an insn as the current contents will also succeed.
5618 If we find an equivalent identical to the source, use it as best,
5619 since this insn will probably be eliminated in that case. */
5622 if (rtx_equal_p (src
, dest
))
5625 src_cost
= COST (src
);
5630 if (rtx_equal_p (src_eqv_here
, dest
))
5633 src_eqv_cost
= COST (src_eqv_here
);
5638 if (rtx_equal_p (src_folded
, dest
))
5639 src_folded_cost
= -1;
5641 src_folded_cost
= COST (src_folded
);
5646 if (rtx_equal_p (src_related
, dest
))
5647 src_related_cost
= -1;
5649 src_related_cost
= COST (src_related
);
5652 /* If this was an indirect jump insn, a known label will really be
5653 cheaper even though it looks more expensive. */
5654 if (dest
== pc_rtx
&& src_const
&& GET_CODE (src_const
) == LABEL_REF
)
5655 src_folded
= src_const
, src_folded_cost
= -1;
5657 /* Terminate loop when replacement made. This must terminate since
5658 the current contents will be tested and will always be valid. */
5663 /* Skip invalid entries. */
5664 while (elt
&& GET_CODE (elt
->exp
) != REG
5665 && ! exp_equiv_p (elt
->exp
, elt
->exp
, 1, 0))
5666 elt
= elt
->next_same_value
;
5668 if (elt
) src_elt_cost
= elt
->cost
;
5670 /* Find cheapest and skip it for the next time. For items
5671 of equal cost, use this order:
5672 src_folded, src, src_eqv, src_related and hash table entry. */
5673 if (src_folded_cost
<= src_cost
5674 && src_folded_cost
<= src_eqv_cost
5675 && src_folded_cost
<= src_related_cost
5676 && src_folded_cost
<= src_elt_cost
)
5678 trial
= src_folded
, src_folded_cost
= 10000;
5679 if (src_folded_force_flag
)
5680 trial
= force_const_mem (mode
, trial
);
5682 else if (src_cost
<= src_eqv_cost
5683 && src_cost
<= src_related_cost
5684 && src_cost
<= src_elt_cost
)
5685 trial
= src
, src_cost
= 10000;
5686 else if (src_eqv_cost
<= src_related_cost
5687 && src_eqv_cost
<= src_elt_cost
)
5688 trial
= src_eqv_here
, src_eqv_cost
= 10000;
5689 else if (src_related_cost
<= src_elt_cost
)
5690 trial
= src_related
, src_related_cost
= 10000;
5693 trial
= canon_reg (copy_rtx (elt
->exp
), 0);
5694 elt
= elt
->next_same_value
;
5695 src_elt_cost
= 10000;
5698 /* We don't normally have an insn matching (set (pc) (pc)), so
5699 check for this separately here. We will delete such an
5702 Tablejump insns contain a USE of the table, so simply replacing
5703 the operand with the constant won't match. This is simply an
5704 unconditional branch, however, and is therefore valid. Just
5705 insert the substitution here and we will delete and re-emit
5708 if (n_sets
== 1 && dest
== pc_rtx
5710 || (GET_CODE (trial
) == LABEL_REF
5711 && ! condjump_p (insn
))))
5713 /* If TRIAL is a label in front of a jump table, we are
5714 really falling through the switch (this is how casesi
5715 insns work), so we must branch around the table. */
5716 if (GET_CODE (trial
) == CODE_LABEL
5717 && NEXT_INSN (trial
) != 0
5718 && GET_CODE (NEXT_INSN (trial
)) == JUMP_INSN
5719 && (GET_CODE (PATTERN (NEXT_INSN (trial
))) == ADDR_DIFF_VEC
5720 || GET_CODE (PATTERN (NEXT_INSN (trial
))) == ADDR_VEC
))
5722 trial
= gen_rtx (LABEL_REF
, Pmode
, get_label_after (trial
));
5724 SET_SRC (sets
[i
].rtl
) = trial
;
5728 /* Look for a substitution that makes a valid insn. */
5729 else if (validate_change (insn
, &SET_SRC (sets
[i
].rtl
), trial
, 0))
5732 /* If we previously found constant pool entries for
5733 constants and this is a constant, try making a
5734 pool entry. Put it in src_folded unless we already have done
5735 this since that is where it likely came from. */
5737 else if (constant_pool_entries_cost
5738 && CONSTANT_P (trial
)
5739 && (src_folded
== 0 || GET_CODE (src_folded
) != MEM
)
5740 && GET_MODE_CLASS (mode
) != MODE_CC
)
5742 src_folded_force_flag
= 1;
5744 src_folded_cost
= constant_pool_entries_cost
;
5748 src
= SET_SRC (sets
[i
].rtl
);
5750 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5751 However, there is an important exception: If both are registers
5752 that are not the head of their equivalence class, replace SET_SRC
5753 with the head of the class. If we do not do this, we will have
5754 both registers live over a portion of the basic block. This way,
5755 their lifetimes will likely abut instead of overlapping. */
5756 if (GET_CODE (dest
) == REG
5757 && REGNO_QTY_VALID_P (REGNO (dest
))
5758 && qty_mode
[reg_qty
[REGNO (dest
)]] == GET_MODE (dest
)
5759 && qty_first_reg
[reg_qty
[REGNO (dest
)]] != REGNO (dest
)
5760 && GET_CODE (src
) == REG
&& REGNO (src
) == REGNO (dest
)
5761 /* Don't do this if the original insn had a hard reg as
5763 && (GET_CODE (sets
[i
].src
) != REG
5764 || REGNO (sets
[i
].src
) >= FIRST_PSEUDO_REGISTER
))
5765 /* We can't call canon_reg here because it won't do anything if
5766 SRC is a hard register. */
5768 int first
= qty_first_reg
[reg_qty
[REGNO (src
)]];
5770 src
= SET_SRC (sets
[i
].rtl
)
5771 = first
>= FIRST_PSEUDO_REGISTER
? regno_reg_rtx
[first
]
5772 : gen_rtx (REG
, GET_MODE (src
), first
);
5774 /* If we had a constant that is cheaper than what we are now
5775 setting SRC to, use that constant. We ignored it when we
5776 thought we could make this into a no-op. */
5777 if (src_const
&& COST (src_const
) < COST (src
)
5778 && validate_change (insn
, &SET_SRC (sets
[i
].rtl
), src_const
, 0))
5782 /* If we made a change, recompute SRC values. */
5783 if (src
!= sets
[i
].src
)
5786 hash_arg_in_memory
= 0;
5787 hash_arg_in_struct
= 0;
5789 sets
[i
].src_hash_code
= HASH (src
, mode
);
5790 sets
[i
].src_volatile
= do_not_record
;
5791 sets
[i
].src_in_memory
= hash_arg_in_memory
;
5792 sets
[i
].src_in_struct
= hash_arg_in_struct
;
5793 sets
[i
].src_elt
= lookup (src
, sets
[i
].src_hash_code
, mode
);
5796 /* If this is a single SET, we are setting a register, and we have an
5797 equivalent constant, we want to add a REG_NOTE. We don't want
5798 to write a REG_EQUAL note for a constant pseudo since verifying that
5799 that pseudo hasn't been eliminated is a pain. Such a note also
5800 won't help anything. */
5801 if (n_sets
== 1 && src_const
&& GET_CODE (dest
) == REG
5802 && GET_CODE (src_const
) != REG
)
5804 rtx tem
= find_reg_note (insn
, REG_EQUAL
, 0);
5806 /* Record the actual constant value in a REG_EQUAL note, making
5807 a new one if one does not already exist. */
5809 XEXP (tem
, 0) = src_const
;
5811 REG_NOTES (insn
) = gen_rtx (EXPR_LIST
, REG_EQUAL
,
5812 src_const
, REG_NOTES (insn
));
5814 /* If storing a constant value in a register that
5815 previously held the constant value 0,
5816 record this fact with a REG_WAS_0 note on this insn.
5818 Note that the *register* is required to have previously held 0,
5819 not just any register in the quantity and we must point to the
5820 insn that set that register to zero.
5822 Rather than track each register individually, we just see if
5823 the last set for this quantity was for this register. */
5825 if (REGNO_QTY_VALID_P (REGNO (dest
))
5826 && qty_const
[reg_qty
[REGNO (dest
)]] == const0_rtx
)
5828 /* See if we previously had a REG_WAS_0 note. */
5829 rtx note
= find_reg_note (insn
, REG_WAS_0
, 0);
5830 rtx const_insn
= qty_const_insn
[reg_qty
[REGNO (dest
)]];
5832 if ((tem
= single_set (const_insn
)) != 0
5833 && rtx_equal_p (SET_DEST (tem
), dest
))
5836 XEXP (note
, 0) = const_insn
;
5838 REG_NOTES (insn
) = gen_rtx (INSN_LIST
, REG_WAS_0
,
5839 const_insn
, REG_NOTES (insn
));
5844 /* Now deal with the destination. */
5846 sets
[i
].inner_dest_loc
= &SET_DEST (sets
[0].rtl
);
5848 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5849 to the MEM or REG within it. */
5850 while (GET_CODE (dest
) == SIGN_EXTRACT
5851 || GET_CODE (dest
) == ZERO_EXTRACT
5852 || GET_CODE (dest
) == SUBREG
5853 || GET_CODE (dest
) == STRICT_LOW_PART
)
5855 sets
[i
].inner_dest_loc
= &XEXP (dest
, 0);
5856 dest
= XEXP (dest
, 0);
5859 sets
[i
].inner_dest
= dest
;
5861 if (GET_CODE (dest
) == MEM
)
5863 dest
= fold_rtx (dest
, insn
);
5865 /* Decide whether we invalidate everything in memory,
5866 or just things at non-fixed places.
5867 Writing a large aggregate must invalidate everything
5868 because we don't know how long it is. */
5869 note_mem_written (dest
, &writes_memory
);
5872 /* Compute the hash code of the destination now,
5873 before the effects of this instruction are recorded,
5874 since the register values used in the address computation
5875 are those before this instruction. */
5876 sets
[i
].dest_hash_code
= HASH (dest
, mode
);
5878 /* Don't enter a bit-field in the hash table
5879 because the value in it after the store
5880 may not equal what was stored, due to truncation. */
5882 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
5883 || GET_CODE (SET_DEST (sets
[i
].rtl
)) == SIGN_EXTRACT
)
5885 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
5887 if (src_const
!= 0 && GET_CODE (src_const
) == CONST_INT
5888 && GET_CODE (width
) == CONST_INT
5889 && INTVAL (width
) < HOST_BITS_PER_INT
5890 && ! (INTVAL (src_const
) & ((-1) << INTVAL (width
))))
5891 /* Exception: if the value is constant,
5892 and it won't be truncated, record it. */
5896 /* This is chosen so that the destination will be invalidated
5897 but no new value will be recorded.
5898 We must invalidate because sometimes constant
5899 values can be recorded for bitfields. */
5900 sets
[i
].src_elt
= 0;
5901 sets
[i
].src_volatile
= 1;
5907 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5909 else if (n_sets
== 1 && dest
== pc_rtx
&& src
== pc_rtx
)
5911 PUT_CODE (insn
, NOTE
);
5912 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
5913 NOTE_SOURCE_FILE (insn
) = 0;
5914 cse_jumps_altered
= 1;
5915 /* One less use of the label this insn used to jump to. */
5916 --LABEL_NUSES (JUMP_LABEL (insn
));
5917 /* No more processing for this set. */
5921 /* If this SET is now setting PC to a label, we know it used to
5922 be a conditional or computed branch. So we see if we can follow
5923 it. If it was a computed branch, delete it and re-emit. */
5924 else if (dest
== pc_rtx
&& GET_CODE (src
) == LABEL_REF
)
5928 /* If this is not in the format for a simple branch and
5929 we are the only SET in it, re-emit it. */
5930 if (! simplejump_p (insn
) && n_sets
== 1)
5932 rtx
new = emit_jump_insn_before (gen_jump (XEXP (src
, 0)), insn
);
5933 JUMP_LABEL (new) = XEXP (src
, 0);
5934 LABEL_NUSES (XEXP (src
, 0))++;
5939 /* Now that we've converted this jump to an unconditional jump,
5940 there is dead code after it. Delete the dead code until we
5941 reach a BARRIER, the end of the function, or a label. Do
5942 not delete NOTEs except for NOTE_INSN_DELETED since later
5943 phases assume these notes are retained. */
5947 while (NEXT_INSN (p
) != 0
5948 && GET_CODE (NEXT_INSN (p
)) != BARRIER
5949 && GET_CODE (NEXT_INSN (p
)) != CODE_LABEL
)
5951 if (GET_CODE (NEXT_INSN (p
)) != NOTE
5952 || NOTE_LINE_NUMBER (NEXT_INSN (p
)) == NOTE_INSN_DELETED
)
5953 delete_insn (NEXT_INSN (p
));
5958 /* If we don't have a BARRIER immediately after INSN, put one there.
5959 Much code assumes that there are no NOTEs between a JUMP_INSN and
5962 if (NEXT_INSN (insn
) == 0
5963 || GET_CODE (NEXT_INSN (insn
)) != BARRIER
)
5964 emit_barrier_after (insn
);
5966 /* We might have two BARRIERs separated by notes. Delete the second
5969 if (p
!= insn
&& NEXT_INSN (p
) != 0
5970 && GET_CODE (NEXT_INSN (p
)) == BARRIER
)
5971 delete_insn (NEXT_INSN (p
));
5973 cse_jumps_altered
= 1;
5977 /* No further processing for this assignment if destination
5980 else if (do_not_record
)
5983 if (sets
[i
].rtl
!= 0 && dest
!= SET_DEST (sets
[i
].rtl
))
5984 sets
[i
].dest_hash_code
= HASH (SET_DEST (sets
[i
].rtl
), mode
);
5987 /* If setting CC0, record what it was set to, or a constant, if it
5988 is equivalent to a constant. If it is being set to a floating-point
5989 value, make a COMPARE with the appropriate constant of 0. If we
5990 don't do this, later code can interpret this as a test against
5991 const0_rtx, which can cause problems if we try to put it into an
5992 insn as a floating-point operand. */
5993 if (dest
== cc0_rtx
)
5995 this_insn_cc0
= src_const
&& mode
!= VOIDmode
? src_const
: src
;
5996 this_insn_cc0_mode
= mode
;
5997 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
5998 this_insn_cc0
= gen_rtx (COMPARE
, VOIDmode
, this_insn_cc0
,
6004 /* Now enter all non-volatile source expressions in the hash table
6005 if they are not already present.
6006 Record their equivalence classes in src_elt.
6007 This way we can insert the corresponding destinations into
6008 the same classes even if the actual sources are no longer in them
6009 (having been invalidated). */
6011 if (src_eqv
&& src_eqv_elt
== 0 && sets
[0].rtl
!= 0 && ! src_eqv_volatile
6012 && ! rtx_equal_p (src_eqv
, SET_DEST (sets
[0].rtl
)))
6014 register struct table_elt
*elt
;
6015 register struct table_elt
*classp
= sets
[0].src_elt
;
6016 rtx dest
= SET_DEST (sets
[0].rtl
);
6017 enum machine_mode eqvmode
= GET_MODE (dest
);
6019 if (GET_CODE (dest
) == STRICT_LOW_PART
)
6021 eqvmode
= GET_MODE (SUBREG_REG (XEXP (dest
, 0)));
6024 if (insert_regs (src_eqv
, classp
, 0))
6025 src_eqv_hash_code
= HASH (src_eqv
, eqvmode
);
6026 elt
= insert (src_eqv
, classp
, src_eqv_hash_code
, eqvmode
);
6027 elt
->in_memory
= src_eqv_in_memory
;
6028 elt
->in_struct
= src_eqv_in_struct
;
6032 for (i
= 0; i
< n_sets
; i
++)
6033 if (sets
[i
].rtl
&& ! sets
[i
].src_volatile
6034 && ! rtx_equal_p (SET_SRC (sets
[i
].rtl
), SET_DEST (sets
[i
].rtl
)))
6036 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == STRICT_LOW_PART
)
6038 /* REG_EQUAL in setting a STRICT_LOW_PART
6039 gives an equivalent for the entire destination register,
6040 not just for the subreg being stored in now.
6041 This is a more interesting equivalence, so we arrange later
6042 to treat the entire reg as the destination. */
6043 sets
[i
].src_elt
= src_eqv_elt
;
6044 sets
[i
].src_hash_code
= src_eqv_hash_code
;
6048 /* Insert source and constant equivalent into hash table, if not
6050 register struct table_elt
*classp
= src_eqv_elt
;
6051 register rtx src
= sets
[i
].src
;
6052 register rtx dest
= SET_DEST (sets
[i
].rtl
);
6053 enum machine_mode mode
6054 = GET_MODE (src
) == VOIDmode
? GET_MODE (dest
) : GET_MODE (src
);
6056 if (sets
[i
].src_elt
== 0)
6058 register struct table_elt
*elt
;
6060 /* Note that these insert_regs calls cannot remove
6061 any of the src_elt's, because they would have failed to
6062 match if not still valid. */
6063 if (insert_regs (src
, classp
, 0))
6064 sets
[i
].src_hash_code
= HASH (src
, mode
);
6065 elt
= insert (src
, classp
, sets
[i
].src_hash_code
, mode
);
6066 elt
->in_memory
= sets
[i
].src_in_memory
;
6067 elt
->in_struct
= sets
[i
].src_in_struct
;
6068 sets
[i
].src_elt
= classp
= elt
;
6071 if (sets
[i
].src_const
&& sets
[i
].src_const_elt
== 0
6072 && src
!= sets
[i
].src_const
6073 && ! rtx_equal_p (sets
[i
].src_const
, src
))
6074 sets
[i
].src_elt
= insert (sets
[i
].src_const
, classp
,
6075 sets
[i
].src_const_hash_code
, mode
);
6078 else if (sets
[i
].src_elt
== 0)
6079 /* If we did not insert the source into the hash table (e.g., it was
6080 volatile), note the equivalence class for the REG_EQUAL value, if any,
6081 so that the destination goes into that class. */
6082 sets
[i
].src_elt
= src_eqv_elt
;
6084 invalidate_from_clobbers (&writes_memory
, x
);
6085 /* Memory, and some registers, are invalidate by subroutine calls. */
6086 if (GET_CODE (insn
) == CALL_INSN
)
6088 static struct write_data everything
= {0, 1, 1, 1};
6089 invalidate_memory (&everything
);
6090 invalidate_for_call ();
6093 /* Now invalidate everything set by this instruction.
6094 If a SUBREG or other funny destination is being set,
6095 sets[i].rtl is still nonzero, so here we invalidate the reg
6096 a part of which is being set. */
6098 for (i
= 0; i
< n_sets
; i
++)
6101 register rtx dest
= sets
[i
].inner_dest
;
6103 /* Needed for registers to remove the register from its
6104 previous quantity's chain.
6105 Needed for memory if this is a nonvarying address, unless
6106 we have just done an invalidate_memory that covers even those. */
6107 if (GET_CODE (dest
) == REG
|| GET_CODE (dest
) == SUBREG
6108 || (! writes_memory
.all
&& ! cse_rtx_addr_varies_p (dest
)))
6112 /* Make sure registers mentioned in destinations
6113 are safe for use in an expression to be inserted.
6114 This removes from the hash table
6115 any invalid entry that refers to one of these registers.
6117 We don't care about the return value from mention_regs because
6118 we are going to hash the SET_DEST values unconditionally. */
6120 for (i
= 0; i
< n_sets
; i
++)
6121 if (sets
[i
].rtl
&& GET_CODE (SET_DEST (sets
[i
].rtl
)) != REG
)
6122 mention_regs (SET_DEST (sets
[i
].rtl
));
6124 /* We may have just removed some of the src_elt's from the hash table.
6125 So replace each one with the current head of the same class. */
6127 for (i
= 0; i
< n_sets
; i
++)
6130 if (sets
[i
].src_elt
&& sets
[i
].src_elt
->first_same_value
== 0)
6131 /* If elt was removed, find current head of same class,
6132 or 0 if nothing remains of that class. */
6134 register struct table_elt
*elt
= sets
[i
].src_elt
;
6136 while (elt
&& elt
->prev_same_value
)
6137 elt
= elt
->prev_same_value
;
6139 while (elt
&& elt
->first_same_value
== 0)
6140 elt
= elt
->next_same_value
;
6141 sets
[i
].src_elt
= elt
? elt
->first_same_value
: 0;
6145 /* Now insert the destinations into their equivalence classes. */
6147 for (i
= 0; i
< n_sets
; i
++)
6150 register rtx dest
= SET_DEST (sets
[i
].rtl
);
6151 register struct table_elt
*elt
;
6153 /* Don't record value if we are not supposed to risk allocating
6154 floating-point values in registers that might be wider than
6156 if ((flag_float_store
6157 && GET_CODE (dest
) == MEM
6158 && GET_MODE_CLASS (GET_MODE (dest
)) == MODE_FLOAT
)
6159 /* Don't record values of destinations set inside a libcall block
6160 since we might delete the libcall. Things should have been set
6161 up so we won't want to reuse such a value, but we play it safe
6164 /* If we didn't put a REG_EQUAL value or a source into the hash
6165 table, there is no point is recording DEST. */
6166 || sets
[i
].src_elt
== 0)
6169 /* STRICT_LOW_PART isn't part of the value BEING set,
6170 and neither is the SUBREG inside it.
6171 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6172 if (GET_CODE (dest
) == STRICT_LOW_PART
)
6173 dest
= SUBREG_REG (XEXP (dest
, 0));
6175 if (GET_CODE (dest
) == REG
)
6176 /* Registers must also be inserted into chains for quantities. */
6177 if (insert_regs (dest
, sets
[i
].src_elt
, 1))
6178 /* If `insert_regs' changes something, the hash code must be
6180 sets
[i
].dest_hash_code
= HASH (dest
, GET_MODE (dest
));
6182 elt
= insert (dest
, sets
[i
].src_elt
,
6183 sets
[i
].dest_hash_code
, GET_MODE (dest
));
6184 elt
->in_memory
= GET_CODE (sets
[i
].inner_dest
) == MEM
;
6187 /* This implicitly assumes a whole struct
6188 need not have MEM_IN_STRUCT_P.
6189 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
6190 elt
->in_struct
= (MEM_IN_STRUCT_P (sets
[i
].inner_dest
)
6191 || sets
[i
].inner_dest
!= SET_DEST (sets
[i
].rtl
));
6194 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is wider
6195 than M2, and both M1 and M2 are a single word, we are also doing
6196 (set (reg:m2 foo) (subreg:m2 (bar:m1 0))) so make that equivalence
6199 However, BAR may have equivalences for which gen_lowpart_if_possible
6200 will produce a simpler value than gen_lowpart_if_possible applied to
6201 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6202 BAR's equivalences. If we don't get a simplified form, make
6203 the SUBREG. It will not be used in an equivalence, but will
6204 cause two similar assignments to be detected.
6206 Note the loop below will find SUBREG_REG (DEST) since we have
6207 already entered SRC and DEST of the SET in the table. */
6209 if (GET_CODE (dest
) == SUBREG
6210 && GET_MODE_SIZE (GET_MODE (dest
)) <= UNITS_PER_WORD
6211 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
))) <= UNITS_PER_WORD
6212 && (GET_MODE_SIZE (GET_MODE (dest
))
6213 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
))))
6214 && sets
[i
].src_elt
!= 0)
6216 enum machine_mode new_mode
= GET_MODE (SUBREG_REG (dest
));
6217 struct table_elt
*elt
, *classp
= 0;
6219 for (elt
= sets
[i
].src_elt
->first_same_value
; elt
;
6220 elt
= elt
->next_same_value
)
6224 struct table_elt
*src_elt
;
6226 /* Ignore invalid entries. */
6227 if (GET_CODE (elt
->exp
) != REG
6228 && ! exp_equiv_p (elt
->exp
, elt
->exp
, 1, 0))
6231 new_src
= gen_lowpart_if_possible (new_mode
, elt
->exp
);
6233 new_src
= gen_rtx (SUBREG
, new_mode
, elt
->exp
, 0);
6235 src_hash
= HASH (new_src
, new_mode
);
6236 src_elt
= lookup (new_src
, src_hash
, new_mode
);
6238 /* Put the new source in the hash table is if isn't
6242 if (insert_regs (new_src
, classp
, 0))
6243 src_hash
= HASH (new_src
, new_mode
);
6244 src_elt
= insert (new_src
, classp
, src_hash
, new_mode
);
6245 src_elt
->in_memory
= elt
->in_memory
;
6246 src_elt
->in_struct
= elt
->in_struct
;
6248 else if (classp
&& classp
!= src_elt
->first_same_value
)
6249 /* Show that two things that we've seen before are
6250 actually the same. */
6251 merge_equiv_classes (src_elt
, classp
);
6253 classp
= src_elt
->first_same_value
;
6258 /* Special handling for (set REG0 REG1)
6259 where REG0 is the "cheapest", cheaper than REG1.
6260 After cse, REG1 will probably not be used in the sequel,
6261 so (if easily done) change this insn to (set REG1 REG0) and
6262 replace REG1 with REG0 in the previous insn that computed their value.
6263 Then REG1 will become a dead store and won't cloud the situation
6264 for later optimizations.
6266 Do not make this change if REG1 is a hard register, because it will
6267 then be used in the sequel and we may be changing a two-operand insn
6268 into a three-operand insn.
6270 Also do not do this if we are operating on a copy of INSN. */
6272 if (n_sets
== 1 && sets
[0].rtl
&& GET_CODE (SET_DEST (sets
[0].rtl
)) == REG
6273 && NEXT_INSN (PREV_INSN (insn
)) == insn
6274 && GET_CODE (SET_SRC (sets
[0].rtl
)) == REG
6275 && REGNO (SET_SRC (sets
[0].rtl
)) >= FIRST_PSEUDO_REGISTER
6276 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets
[0].rtl
)))
6277 && (qty_first_reg
[reg_qty
[REGNO (SET_SRC (sets
[0].rtl
))]]
6278 == REGNO (SET_DEST (sets
[0].rtl
))))
6280 rtx prev
= PREV_INSN (insn
);
6281 while (prev
&& GET_CODE (prev
) == NOTE
)
6282 prev
= PREV_INSN (prev
);
6284 if (prev
&& GET_CODE (prev
) == INSN
&& GET_CODE (PATTERN (prev
)) == SET
6285 && SET_DEST (PATTERN (prev
)) == SET_SRC (sets
[0].rtl
))
6287 rtx dest
= SET_DEST (sets
[0].rtl
);
6288 rtx note
= find_reg_note (prev
, REG_EQUIV
, 0);
6290 validate_change (prev
, & SET_DEST (PATTERN (prev
)), dest
, 1);
6291 validate_change (insn
, & SET_DEST (sets
[0].rtl
),
6292 SET_SRC (sets
[0].rtl
), 1);
6293 validate_change (insn
, & SET_SRC (sets
[0].rtl
), dest
, 1);
6294 apply_change_group ();
6296 /* If REG1 was equivalent to a constant, REG0 is not. */
6298 PUT_REG_NOTE_KIND (note
, REG_EQUAL
);
6300 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6301 any REG_WAS_0 note on INSN to PREV. */
6302 note
= find_reg_note (prev
, REG_WAS_0
, 0);
6304 remove_note (prev
, note
);
6306 note
= find_reg_note (insn
, REG_WAS_0
, 0);
6309 remove_note (insn
, note
);
6310 XEXP (note
, 1) = REG_NOTES (prev
);
6311 REG_NOTES (prev
) = note
;
6316 /* If this is a conditional jump insn, record any known equivalences due to
6317 the condition being tested. */
6319 last_jump_equiv_class
= 0;
6320 if (GET_CODE (insn
) == JUMP_INSN
6321 && n_sets
== 1 && GET_CODE (x
) == SET
6322 && GET_CODE (SET_SRC (x
)) == IF_THEN_ELSE
)
6323 record_jump_equiv (insn
, 0);
6326 /* If the previous insn set CC0 and this insn no longer references CC0,
6327 delete the previous insn. Here we use the fact that nothing expects CC0
6328 to be valid over an insn, which is true until the final pass. */
6329 if (prev_insn
&& GET_CODE (prev_insn
) == INSN
6330 && (tem
= single_set (prev_insn
)) != 0
6331 && SET_DEST (tem
) == cc0_rtx
6332 && ! reg_mentioned_p (cc0_rtx
, x
))
6334 PUT_CODE (prev_insn
, NOTE
);
6335 NOTE_LINE_NUMBER (prev_insn
) = NOTE_INSN_DELETED
;
6336 NOTE_SOURCE_FILE (prev_insn
) = 0;
6339 prev_insn_cc0
= this_insn_cc0
;
6340 prev_insn_cc0_mode
= this_insn_cc0_mode
;
6346 /* Store 1 in *WRITES_PTR for those categories of memory ref
6347 that must be invalidated when the expression WRITTEN is stored in.
6348 If WRITTEN is null, say everything must be invalidated. */
6351 note_mem_written (written
, writes_ptr
)
6353 struct write_data
*writes_ptr
;
6355 static struct write_data everything
= {0, 1, 1, 1};
6358 *writes_ptr
= everything
;
6359 else if (GET_CODE (written
) == MEM
)
6361 /* Pushing or popping the stack invalidates just the stack pointer. */
6362 rtx addr
= XEXP (written
, 0);
6363 if ((GET_CODE (addr
) == PRE_DEC
|| GET_CODE (addr
) == PRE_INC
6364 || GET_CODE (addr
) == POST_DEC
|| GET_CODE (addr
) == POST_INC
)
6365 && GET_CODE (XEXP (addr
, 0)) == REG
6366 && REGNO (XEXP (addr
, 0)) == STACK_POINTER_REGNUM
)
6371 else if (GET_MODE (written
) == BLKmode
)
6372 *writes_ptr
= everything
;
6373 else if (cse_rtx_addr_varies_p (written
))
6375 /* A varying address that is a sum indicates an array element,
6376 and that's just as good as a structure element
6377 in implying that we need not invalidate scalar variables. */
6378 if (!(MEM_IN_STRUCT_P (written
)
6379 || GET_CODE (XEXP (written
, 0)) == PLUS
))
6380 writes_ptr
->all
= 1;
6381 writes_ptr
->nonscalar
= 1;
6383 writes_ptr
->var
= 1;
6387 /* Perform invalidation on the basis of everything about an insn
6388 except for invalidating the actual places that are SET in it.
6389 This includes the places CLOBBERed, and anything that might
6390 alias with something that is SET or CLOBBERed.
6392 W points to the writes_memory for this insn, a struct write_data
6393 saying which kinds of memory references must be invalidated.
6394 X is the pattern of the insn. */
6397 invalidate_from_clobbers (w
, x
)
6398 struct write_data
*w
;
6401 /* If W->var is not set, W specifies no action.
6402 If W->all is set, this step gets all memory refs
6403 so they can be ignored in the rest of this function. */
6405 invalidate_memory (w
);
6409 if (reg_tick
[STACK_POINTER_REGNUM
] >= 0)
6410 reg_tick
[STACK_POINTER_REGNUM
]++;
6412 /* This should be *very* rare. */
6413 if (TEST_HARD_REG_BIT (hard_regs_in_table
, STACK_POINTER_REGNUM
))
6414 invalidate (stack_pointer_rtx
);
6417 if (GET_CODE (x
) == CLOBBER
)
6419 rtx ref
= XEXP (x
, 0);
6421 && (GET_CODE (ref
) == REG
|| GET_CODE (ref
) == SUBREG
6422 || (GET_CODE (ref
) == MEM
&& ! w
->all
)))
6425 else if (GET_CODE (x
) == PARALLEL
)
6428 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
6430 register rtx y
= XVECEXP (x
, 0, i
);
6431 if (GET_CODE (y
) == CLOBBER
)
6433 rtx ref
= XEXP (y
, 0);
6435 &&(GET_CODE (ref
) == REG
|| GET_CODE (ref
) == SUBREG
6436 || (GET_CODE (ref
) == MEM
&& !w
->all
)))
6443 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6444 and replace any registers in them with either an equivalent constant
6445 or the canonical form of the register. If we are inside an address,
6446 only do this if the address remains valid.
6448 OBJECT is 0 except when within a MEM in which case it is the MEM.
6450 Return the replacement for X. */
6453 cse_process_notes (x
, object
)
6457 enum rtx_code code
= GET_CODE (x
);
6458 char *fmt
= GET_RTX_FORMAT (code
);
6475 XEXP (x
, 0) = cse_process_notes (XEXP (x
, 0), x
);
6480 if (REG_NOTE_KIND (x
) == REG_EQUAL
)
6481 XEXP (x
, 0) = cse_process_notes (XEXP (x
, 0), 0);
6483 XEXP (x
, 1) = cse_process_notes (XEXP (x
, 1), 0);
6487 i
= reg_qty
[REGNO (x
)];
6489 /* Return a constant or a constant register. */
6490 if (REGNO_QTY_VALID_P (REGNO (x
))
6491 && qty_const
[i
] != 0
6492 && (CONSTANT_P (qty_const
[i
])
6493 || GET_CODE (qty_const
[i
]) == REG
))
6495 rtx
new = gen_lowpart_if_possible (GET_MODE (x
), qty_const
[i
]);
6500 /* Otherwise, canonicalize this register. */
6501 return canon_reg (x
, 0);
6504 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
6506 validate_change (object
, &XEXP (x
, i
),
6507 cse_process_notes (XEXP (x
, i
), object
), 0);
6512 /* Find common subexpressions between the end test of a loop and the beginning
6513 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6515 Often we have a loop where an expression in the exit test is used
6516 in the body of the loop. For example "while (*p) *q++ = *p++;".
6517 Because of the way we duplicate the loop exit test in front of the loop,
6518 however, we don't detect that common subexpression. This will be caught
6519 when global cse is implemented, but this is a quite common case.
6521 This function handles the most common cases of these common expressions.
6522 It is called after we have processed the basic block ending with the
6523 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6524 jumps to a label used only once. */
6527 cse_around_loop (loop_start
)
6532 struct table_elt
*p
;
6534 /* If the jump at the end of the loop doesn't go to the start, we don't
6536 for (insn
= PREV_INSN (loop_start
);
6537 insn
&& (GET_CODE (insn
) == NOTE
&& NOTE_LINE_NUMBER (insn
) >= 0);
6538 insn
= PREV_INSN (insn
))
6542 || GET_CODE (insn
) != NOTE
6543 || NOTE_LINE_NUMBER (insn
) != NOTE_INSN_LOOP_BEG
)
6546 /* If the last insn of the loop (the end test) was an NE comparison,
6547 we will interpret it as an EQ comparison, since we fell through
6548 the loop. Any equivalances resulting from that comparison are
6549 therefore not valid and must be invalidated. */
6550 if (last_jump_equiv_class
)
6551 for (p
= last_jump_equiv_class
->first_same_value
; p
;
6552 p
= p
->next_same_value
)
6553 if (GET_CODE (p
->exp
) == MEM
|| GET_CODE (p
->exp
) == REG
6554 || GET_CODE (p
->exp
) == SUBREG
)
6555 invalidate (p
->exp
);
6557 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6558 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6560 The only thing we do with SET_DEST is invalidate entries, so we
6561 can safely process each SET in order. It is slightly less efficient
6562 to do so, but we only want to handle the most common cases. */
6564 for (insn
= NEXT_INSN (loop_start
);
6565 GET_CODE (insn
) != CALL_INSN
&& GET_CODE (insn
) != CODE_LABEL
6566 && ! (GET_CODE (insn
) == NOTE
6567 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
);
6568 insn
= NEXT_INSN (insn
))
6570 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
6571 && (GET_CODE (PATTERN (insn
)) == SET
6572 || GET_CODE (PATTERN (insn
)) == CLOBBER
))
6573 cse_set_around_loop (PATTERN (insn
), insn
, loop_start
);
6574 else if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
6575 && GET_CODE (PATTERN (insn
)) == PARALLEL
)
6576 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
6577 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == SET
6578 || GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == CLOBBER
)
6579 cse_set_around_loop (XVECEXP (PATTERN (insn
), 0, i
), insn
,
6584 /* Variable used for communications between the next two routines. */
6586 static struct write_data skipped_writes_memory
;
6588 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6589 since they are done elsewhere. This function is called via note_stores. */
6592 invalidate_skipped_set (dest
, set
)
6596 if (GET_CODE (set
) == CLOBBER
6603 if (GET_CODE (dest
) == MEM
)
6604 note_mem_written (dest
, &skipped_writes_memory
);
6606 if (GET_CODE (dest
) == REG
|| GET_CODE (dest
) == SUBREG
6607 || (! skipped_writes_memory
.all
&& ! cse_rtx_addr_varies_p (dest
)))
6611 /* Invalidate all insns from START up to the end of the function or the
6612 next label. This called when we wish to CSE around a block that is
6613 conditionally executed. */
6616 invalidate_skipped_block (start
)
6621 static struct write_data init
= {0, 0, 0, 0};
6622 static struct write_data everything
= {0, 1, 1, 1};
6624 for (insn
= start
; insn
&& GET_CODE (insn
) != CODE_LABEL
;
6625 insn
= NEXT_INSN (insn
))
6627 if (GET_RTX_CLASS (GET_CODE (insn
)) != 'i')
6630 skipped_writes_memory
= init
;
6632 if (GET_CODE (insn
) == CALL_INSN
)
6634 invalidate_for_call ();
6635 skipped_writes_memory
= everything
;
6638 note_stores (PATTERN (insn
), invalidate_skipped_set
);
6639 invalidate_from_clobbers (&skipped_writes_memory
, PATTERN (insn
));
6643 /* Used for communication between the following two routines; contains a
6644 value to be checked for modification. */
6646 static rtx cse_check_loop_start_value
;
6648 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
6649 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
6652 cse_check_loop_start (x
, set
)
6656 if (cse_check_loop_start_value
== 0
6657 || GET_CODE (x
) == CC0
|| GET_CODE (x
) == PC
)
6660 if ((GET_CODE (x
) == MEM
&& GET_CODE (cse_check_loop_start_value
) == MEM
)
6661 || reg_overlap_mentioned_p (x
, cse_check_loop_start_value
))
6662 cse_check_loop_start_value
= 0;
6665 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6666 a loop that starts with the label at LOOP_START.
6668 If X is a SET, we see if its SET_SRC is currently in our hash table.
6669 If so, we see if it has a value equal to some register used only in the
6670 loop exit code (as marked by jump.c).
6672 If those two conditions are true, we search backwards from the start of
6673 the loop to see if that same value was loaded into a register that still
6674 retains its value at the start of the loop.
6676 If so, we insert an insn after the load to copy the destination of that
6677 load into the equivalent register and (try to) replace our SET_SRC with that
6680 In any event, we invalidate whatever this SET or CLOBBER modifies. */
6683 cse_set_around_loop (x
, insn
, loop_start
)
6689 struct table_elt
*src_elt
;
6690 static struct write_data init
= {0, 0, 0, 0};
6691 struct write_data writes_memory
;
6693 writes_memory
= init
;
6695 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6696 are setting PC or CC0 or whose SET_SRC is already a register. */
6697 if (GET_CODE (x
) == SET
6698 && GET_CODE (SET_DEST (x
)) != PC
&& GET_CODE (SET_DEST (x
)) != CC0
6699 && GET_CODE (SET_SRC (x
)) != REG
)
6701 src_elt
= lookup (SET_SRC (x
),
6702 HASH (SET_SRC (x
), GET_MODE (SET_DEST (x
))),
6703 GET_MODE (SET_DEST (x
)));
6706 for (src_elt
= src_elt
->first_same_value
; src_elt
;
6707 src_elt
= src_elt
->next_same_value
)
6708 if (GET_CODE (src_elt
->exp
) == REG
&& REG_LOOP_TEST_P (src_elt
->exp
)
6709 && COST (src_elt
->exp
) < COST (SET_SRC (x
)))
6713 /* Look for an insn in front of LOOP_START that sets
6714 something in the desired mode to SET_SRC (x) before we hit
6715 a label or CALL_INSN. */
6717 for (p
= prev_nonnote_insn (loop_start
);
6718 p
&& GET_CODE (p
) != CALL_INSN
6719 && GET_CODE (p
) != CODE_LABEL
;
6720 p
= prev_nonnote_insn (p
))
6721 if ((set
= single_set (p
)) != 0
6722 && GET_CODE (SET_DEST (set
)) == REG
6723 && GET_MODE (SET_DEST (set
)) == src_elt
->mode
6724 && rtx_equal_p (SET_SRC (set
), SET_SRC (x
)))
6726 /* We now have to ensure that nothing between P
6727 and LOOP_START modified anything referenced in
6728 SET_SRC (x). We know that nothing within the loop
6729 can modify it, or we would have invalidated it in
6733 cse_check_loop_start_value
= SET_SRC (x
);
6734 for (q
= p
; q
!= loop_start
; q
= NEXT_INSN (q
))
6735 if (GET_RTX_CLASS (GET_CODE (q
)) == 'i')
6736 note_stores (PATTERN (q
), cse_check_loop_start
);
6738 /* If nothing was changed and we can replace our
6739 SET_SRC, add an insn after P to copy its destination
6740 to what we will be replacing SET_SRC with. */
6741 if (cse_check_loop_start_value
6742 && validate_change (insn
, &SET_SRC (x
),
6744 emit_insn_after (gen_move_insn (src_elt
->exp
,
6752 /* Now invalidate anything modified by X. */
6753 note_mem_written (SET_DEST (x
), &writes_memory
);
6755 if (writes_memory
.var
)
6756 invalidate_memory (&writes_memory
);
6758 /* See comment on similar code in cse_insn for explanation of these tests. */
6759 if (GET_CODE (SET_DEST (x
)) == REG
|| GET_CODE (SET_DEST (x
)) == SUBREG
6760 || (GET_CODE (SET_DEST (x
)) == MEM
&& ! writes_memory
.all
6761 && ! cse_rtx_addr_varies_p (SET_DEST (x
))))
6762 invalidate (SET_DEST (x
));
6765 /* Find the end of INSN's basic block and return its range,
6766 the total number of SETs in all the insns of the block, the last insn of the
6767 block, and the branch path.
6769 The branch path indicates which branches should be followed. If a non-zero
6770 path size is specified, the block should be rescanned and a different set
6771 of branches will be taken. The branch path is only used if
6772 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6774 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6775 used to describe the block. It is filled in with the information about
6776 the current block. The incoming structure's branch path, if any, is used
6777 to construct the output branch path. */
6779 /* Define maximum length of a branch path. */
6781 #define PATHLENGTH 20
6783 struct cse_basic_block_data
{
6784 /* Lowest CUID value of insns in block. */
6786 /* Highest CUID value of insns in block. */
6788 /* Total number of SETs in block. */
6790 /* Last insn in the block. */
6792 /* Size of current branch path, if any. */
6794 /* Current branch path, indicating which branches will be taken. */
6795 struct branch_path
{
6796 /* The branch insn. */
6798 /* Whether it should be taken or not. AROUND is the same as taken
6799 except that it is used when the destination label is not preceded
6801 enum taken
{TAKEN
, NOT_TAKEN
, AROUND
} status
;
6806 cse_end_of_basic_block (insn
, data
, follow_jumps
, after_loop
, skip_blocks
)
6808 struct cse_basic_block_data
*data
;
6815 int low_cuid
= INSN_CUID (insn
), high_cuid
= INSN_CUID (insn
);
6816 int path_size
= data
->path_size
;
6820 /* Update the previous branch path, if any. If the last branch was
6821 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6822 shorten the path by one and look at the previous branch. We know that
6823 at least one branch must have been taken if PATH_SIZE is non-zero. */
6824 while (path_size
> 0)
6826 if (data
->path
[path_size
- 1].status
!= NOT_TAKEN
)
6828 data
->path
[path_size
- 1].status
= NOT_TAKEN
;
6835 /* Scan to end of this basic block. */
6836 while (p
&& GET_CODE (p
) != CODE_LABEL
)
6838 /* Don't cse out the end of a loop. This makes a difference
6839 only for the unusual loops that always execute at least once;
6840 all other loops have labels there so we will stop in any case.
6841 Cse'ing out the end of the loop is dangerous because it
6842 might cause an invariant expression inside the loop
6843 to be reused after the end of the loop. This would make it
6844 hard to move the expression out of the loop in loop.c,
6845 especially if it is one of several equivalent expressions
6846 and loop.c would like to eliminate it.
6848 If we are running after loop.c has finished, we can ignore
6849 the NOTE_INSN_LOOP_END. */
6851 if (! after_loop
&& GET_CODE (p
) == NOTE
6852 && NOTE_LINE_NUMBER (p
) == NOTE_INSN_LOOP_END
)
6855 /* Don't cse over a call to setjmp; on some machines (eg vax)
6856 the regs restored by the longjmp come from
6857 a later time than the setjmp. */
6858 if (GET_CODE (p
) == NOTE
6859 && NOTE_LINE_NUMBER (p
) == NOTE_INSN_SETJMP
)
6862 /* A PARALLEL can have lots of SETs in it,
6863 especially if it is really an ASM_OPERANDS. */
6864 if (GET_RTX_CLASS (GET_CODE (p
)) == 'i'
6865 && GET_CODE (PATTERN (p
)) == PARALLEL
)
6866 nsets
+= XVECLEN (PATTERN (p
), 0);
6867 else if (GET_CODE (p
) != NOTE
)
6870 if (INSN_CUID (p
) > high_cuid
)
6871 high_cuid
= INSN_CUID (p
);
6872 if (INSN_CUID (p
) < low_cuid
)
6873 low_cuid
= INSN_CUID(p
);
6875 /* See if this insn is in our branch path. If it is and we are to
6877 if (path_entry
< path_size
&& data
->path
[path_entry
].branch
== p
)
6879 if (data
->path
[path_entry
].status
!= NOT_TAKEN
)
6882 /* Point to next entry in path, if any. */
6886 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6887 was specified, we haven't reached our maximum path length, there are
6888 insns following the target of the jump, this is the only use of the
6889 jump label, and the target label is preceded by a BARRIER.
6891 Alternatively, we can follow the jump if it branches around a
6892 block of code and there are no other branches into the block.
6893 In this case invalidate_skipped_block will be called to invalidate any
6894 registers set in the block when following the jump. */
6896 else if ((follow_jumps
|| skip_blocks
) && path_size
< PATHLENGTH
- 1
6897 && GET_CODE (p
) == JUMP_INSN
6898 && GET_CODE (PATTERN (p
)) == SET
6899 && GET_CODE (SET_SRC (PATTERN (p
))) == IF_THEN_ELSE
6900 && LABEL_NUSES (JUMP_LABEL (p
)) == 1
6901 && NEXT_INSN (JUMP_LABEL (p
)) != 0)
6903 for (q
= PREV_INSN (JUMP_LABEL (p
)); q
; q
= PREV_INSN (q
))
6904 if ((GET_CODE (q
) != NOTE
6905 || NOTE_LINE_NUMBER (q
) == NOTE_INSN_LOOP_END
6906 || NOTE_LINE_NUMBER (q
) == NOTE_INSN_SETJMP
)
6907 && (GET_CODE (q
) != CODE_LABEL
|| LABEL_NUSES (q
) != 0))
6910 /* If we ran into a BARRIER, this code is an extension of the
6911 basic block when the branch is taken. */
6912 if (follow_jumps
&& q
!= 0 && GET_CODE (q
) == BARRIER
)
6914 /* Don't allow ourself to keep walking around an
6915 always-executed loop. */
6916 if (next_real_insn (q
) == next_real_insn (insn
))
6919 /* Similarly, don't put a branch in our path more than once. */
6920 for (i
= 0; i
< path_entry
; i
++)
6921 if (data
->path
[i
].branch
== p
)
6924 if (i
!= path_entry
)
6927 data
->path
[path_entry
].branch
= p
;
6928 data
->path
[path_entry
++].status
= TAKEN
;
6930 /* This branch now ends our path. It was possible that we
6931 didn't see this branch the last time around (when the
6932 insn in front of the target was a JUMP_INSN that was
6933 turned into a no-op). */
6934 path_size
= path_entry
;
6937 /* Mark block so we won't scan it again later. */
6938 PUT_MODE (NEXT_INSN (p
), QImode
);
6940 /* Detect a branch around a block of code. */
6941 else if (skip_blocks
&& q
!= 0 && GET_CODE (q
) != CODE_LABEL
)
6945 if (next_real_insn (q
) == next_real_insn (insn
))
6948 for (i
= 0; i
< path_entry
; i
++)
6949 if (data
->path
[i
].branch
== p
)
6952 if (i
!= path_entry
)
6955 /* This is no_labels_between_p (p, q) with an added check for
6956 reaching the end of a function (in case Q precedes P). */
6957 for (tmp
= NEXT_INSN (p
); tmp
&& tmp
!= q
; tmp
= NEXT_INSN (tmp
))
6958 if (GET_CODE (tmp
) == CODE_LABEL
)
6963 data
->path
[path_entry
].branch
= p
;
6964 data
->path
[path_entry
++].status
= AROUND
;
6966 path_size
= path_entry
;
6969 /* Mark block so we won't scan it again later. */
6970 PUT_MODE (NEXT_INSN (p
), QImode
);
6977 data
->low_cuid
= low_cuid
;
6978 data
->high_cuid
= high_cuid
;
6979 data
->nsets
= nsets
;
6982 /* If all jumps in the path are not taken, set our path length to zero
6983 so a rescan won't be done. */
6984 for (i
= path_size
- 1; i
>= 0; i
--)
6985 if (data
->path
[i
].status
!= NOT_TAKEN
)
6989 data
->path_size
= 0;
6991 data
->path_size
= path_size
;
6993 /* End the current branch path. */
6994 data
->path
[path_size
].branch
= 0;
6997 static rtx
cse_basic_block ();
6999 /* Perform cse on the instructions of a function.
7000 F is the first instruction.
7001 NREGS is one plus the highest pseudo-reg number used in the instruction.
7003 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7004 (only if -frerun-cse-after-loop).
7006 Returns 1 if jump_optimize should be redone due to simplifications
7007 in conditional jump instructions. */
7010 cse_main (f
, nregs
, after_loop
, file
)
7016 struct cse_basic_block_data val
;
7017 register rtx insn
= f
;
7020 cse_jumps_altered
= 0;
7021 constant_pool_entries_cost
= 0;
7028 all_minus_one
= (int *) alloca (nregs
* sizeof (int));
7029 consec_ints
= (int *) alloca (nregs
* sizeof (int));
7031 for (i
= 0; i
< nregs
; i
++)
7033 all_minus_one
[i
] = -1;
7037 reg_next_eqv
= (int *) alloca (nregs
* sizeof (int));
7038 reg_prev_eqv
= (int *) alloca (nregs
* sizeof (int));
7039 reg_qty
= (int *) alloca (nregs
* sizeof (int));
7040 reg_in_table
= (int *) alloca (nregs
* sizeof (int));
7041 reg_tick
= (int *) alloca (nregs
* sizeof (int));
7043 /* Discard all the free elements of the previous function
7044 since they are allocated in the temporarily obstack. */
7045 bzero (table
, sizeof table
);
7046 free_element_chain
= 0;
7047 n_elements_made
= 0;
7049 /* Find the largest uid. */
7052 uid_cuid
= (short *) alloca ((i
+ 1) * sizeof (short));
7053 bzero (uid_cuid
, (i
+ 1) * sizeof (short));
7055 /* Compute the mapping from uids to cuids.
7056 CUIDs are numbers assigned to insns, like uids,
7057 except that cuids increase monotonically through the code.
7058 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7059 between two insns is not affected by -g. */
7061 for (insn
= f
, i
= 0; insn
; insn
= NEXT_INSN (insn
))
7063 if (GET_CODE (insn
) != NOTE
7064 || NOTE_LINE_NUMBER (insn
) < 0)
7065 INSN_CUID (insn
) = ++i
;
7067 /* Give a line number note the same cuid as preceding insn. */
7068 INSN_CUID (insn
) = i
;
7071 /* Initialize which registers are clobbered by calls. */
7073 CLEAR_HARD_REG_SET (regs_invalidated_by_call
);
7075 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
7076 if ((call_used_regs
[i
]
7077 /* Used to check !fixed_regs[i] here, but that isn't safe;
7078 fixed regs are still call-clobbered, and sched can get
7079 confused if they can "live across calls".
7081 The frame pointer is always preserved across calls. The arg
7082 pointer is if it is fixed. The stack pointer usually is, unless
7083 RETURN_POPS_ARGS, in which case an explicit CLOBBER
7084 will be present. If we are generating PIC code, the PIC offset
7085 table register is preserved across calls. */
7087 && i
!= STACK_POINTER_REGNUM
7088 && i
!= FRAME_POINTER_REGNUM
7089 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
7090 && ! (i
== ARG_POINTER_REGNUM
&& fixed_regs
[i
])
7092 #ifdef PIC_OFFSET_TABLE_REGNUM
7093 && ! (i
== PIC_OFFSET_TABLE_REGNUM
&& flag_pic
)
7097 SET_HARD_REG_BIT (regs_invalidated_by_call
, i
);
7099 /* Loop over basic blocks.
7100 Compute the maximum number of qty's needed for each basic block
7101 (which is 2 for each SET). */
7105 cse_end_of_basic_block (insn
, &val
, flag_cse_follow_jumps
, after_loop
,
7106 flag_cse_skip_blocks
);
7108 /* If this basic block was already processed or has no sets, skip it. */
7109 if (val
.nsets
== 0 || GET_MODE (insn
) == QImode
)
7111 PUT_MODE (insn
, VOIDmode
);
7112 insn
= (val
.last
? NEXT_INSN (val
.last
) : 0);
7117 cse_basic_block_start
= val
.low_cuid
;
7118 cse_basic_block_end
= val
.high_cuid
;
7119 max_qty
= val
.nsets
* 2;
7122 fprintf (file
, ";; Processing block from %d to %d, %d sets.\n",
7123 INSN_UID (insn
), val
.last
? INSN_UID (val
.last
) : 0,
7126 /* Make MAX_QTY bigger to give us room to optimize
7127 past the end of this basic block, if that should prove useful. */
7133 /* If this basic block is being extended by following certain jumps,
7134 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7135 Otherwise, we start after this basic block. */
7136 if (val
.path_size
> 0)
7137 cse_basic_block (insn
, val
.last
, val
.path
, 0);
7140 int old_cse_jumps_altered
= cse_jumps_altered
;
7143 /* When cse changes a conditional jump to an unconditional
7144 jump, we want to reprocess the block, since it will give
7145 us a new branch path to investigate. */
7146 cse_jumps_altered
= 0;
7147 temp
= cse_basic_block (insn
, val
.last
, val
.path
, ! after_loop
);
7148 if (cse_jumps_altered
== 0
7149 || (flag_cse_follow_jumps
== 0 && flag_cse_skip_blocks
== 0))
7152 cse_jumps_altered
|= old_cse_jumps_altered
;
7160 /* Tell refers_to_mem_p that qty_const info is not available. */
7163 if (max_elements_made
< n_elements_made
)
7164 max_elements_made
= n_elements_made
;
7166 return cse_jumps_altered
;
/* NOTE(review): lossy extraction -- original line numbers are embedded and
   some lines (braces, a few statements) are missing.  Comments describe
   only what is visible.  */
7169 /* Process a single basic block. FROM and TO are the limits of the basic
7170 block. NEXT_BRANCH points to the branch path when following jumps or
7171 a null path when not following jumps.
7173 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7174 loop. This is true when we are being called for the last time on a
7175 block and this CSE pass is before loop.c. */
7178 cse_basic_block (from
, to
, next_branch
, around_loop
)
7179 register rtx from
, to
;
7180 struct branch_path
*next_branch
;
7185 int in_libcall_block
= 0;
7187 /* Each of these arrays is undefined before max_reg, so only allocate
7188 the space actually needed and adjust the start below. */
7190 qty_first_reg
= (int *) alloca ((max_qty
- max_reg
) * sizeof (int));
7191 qty_last_reg
= (int *) alloca ((max_qty
- max_reg
) * sizeof (int));
7192 qty_mode
= (enum machine_mode
*) alloca ((max_qty
- max_reg
) * sizeof (enum machine_mode
));
7193 qty_const
= (rtx
*) alloca ((max_qty
- max_reg
) * sizeof (rtx
));
7194 qty_const_insn
= (rtx
*) alloca ((max_qty
- max_reg
) * sizeof (rtx
));
/* NOTE(review): the assignment target of this alloca (presumably
   qty_comparison_code, given the "-= max_reg" adjustment below) is
   missing from this extraction -- TODO confirm.  */
7196 = (enum rtx_code
*) alloca ((max_qty
- max_reg
) * sizeof (enum rtx_code
));
7197 qty_comparison_qty
= (int *) alloca ((max_qty
- max_reg
) * sizeof (int));
7198 qty_comparison_const
= (rtx
*) alloca ((max_qty
- max_reg
) * sizeof (rtx
));
/* Bias each pointer by max_reg so the arrays can be indexed directly
   by quantity numbers, which start at max_reg.  */
7200 qty_first_reg
-= max_reg
;
7201 qty_last_reg
-= max_reg
;
7202 qty_mode
-= max_reg
;
7203 qty_const
-= max_reg
;
7204 qty_const_insn
-= max_reg
;
7205 qty_comparison_code
-= max_reg
;
7206 qty_comparison_qty
-= max_reg
;
7207 qty_comparison_const
-= max_reg
;
7211 /* TO might be a label. If so, protect it from being deleted. */
7212 if (to
!= 0 && GET_CODE (to
) == CODE_LABEL
)
/* Main scan: walk every insn of the block, from FROM up to (not
   including) TO.  */
7215 for (insn
= from
; insn
!= to
; insn
= NEXT_INSN (insn
))
7217 register enum rtx_code code
;
7219 /* See if this is a branch that is part of the path. If so, and it is
7220 to be taken, do so. */
7221 if (next_branch
->branch
== insn
)
7223 enum taken status
= next_branch
++->status
;
7224 if (status
!= NOT_TAKEN
)
7226 if (status
== TAKEN
)
7227 record_jump_equiv (insn
, 1);
/* Branch around a block: the skipped insns may clobber things, so
   invalidate what they would change.  */
7229 invalidate_skipped_block (NEXT_INSN (insn
));
7231 /* Set the last insn as the jump insn; it doesn't affect cc0.
7232 Then follow this branch. */
7237 insn
= JUMP_LABEL (insn
);
7242 code
= GET_CODE (insn
);
/* Clear the QImode "already scanned" mark left by an earlier pass.  */
7243 if (GET_MODE (insn
) == QImode
)
7244 PUT_MODE (insn
, VOIDmode
);
7246 if (GET_RTX_CLASS (code
) == 'i')
7248 /* Process notes first so we have all notes in canonical forms when
7249 looking for duplicate operations. */
7251 if (REG_NOTES (insn
))
7252 REG_NOTES (insn
) = cse_process_notes (REG_NOTES (insn
), 0);
7254 /* Track when we are inside in LIBCALL block. Inside such a block,
7255 we do not want to record destinations. The last insn of a
7256 LIBCALL block is not considered to be part of the block, since
7257 its destination is the result of the block and hence should be
7260 if (find_reg_note (insn
, REG_LIBCALL
, 0))
7261 in_libcall_block
= 1;
7262 else if (find_reg_note (insn
, REG_RETVAL
, 0))
7263 in_libcall_block
= 0;
/* The real work: run CSE over this one insn.  */
7265 cse_insn (insn
, in_libcall_block
);
7268 /* If INSN is now an unconditional jump, skip to the end of our
7269 basic block by pretending that we just did the last insn in the
7270 basic block. If we are jumping to the end of our block, show
7271 that we can have one usage of TO. */
7273 if (simplejump_p (insn
))
7278 if (JUMP_LABEL (insn
) == to
)
7281 insn
= PREV_INSN (to
);
7284 /* See if it is ok to keep on going past the label
7285 which used to end our basic block. Remember that we incremented
7286 the count of that label, so we decrement it here. If we made
7287 a jump unconditional, TO_USAGE will be one; in that case, we don't
7288 want to count the use in that jump. */
7290 if (to
!= 0 && NEXT_INSN (insn
) == to
7291 && GET_CODE (to
) == CODE_LABEL
&& --LABEL_NUSES (to
) == to_usage
)
7293 struct cse_basic_block_data val
;
7295 insn
= NEXT_INSN (to
);
7297 if (LABEL_NUSES (to
) == 0)
7300 /* Find the end of the following block. Note that we won't be
7301 following branches in this case. If TO was the last insn
7302 in the function, we are done. Similarly, if we deleted the
7303 insn after TO, it must have been because it was preceded by
7304 a BARRIER. In that case, we are done with this block because it
7305 has no continuation. */
7307 if (insn
== 0 || INSN_DELETED_P (insn
))
7312 cse_end_of_basic_block (insn
, &val
, 0, 0, 0);
7314 /* If the tables we allocated have enough space left
7315 to handle all the SETs in the next basic block,
7316 continue through it. Otherwise, return,
7317 and that block will be scanned individually. */
7318 if (val
.nsets
* 2 + next_qty
> max_qty
)
7321 cse_basic_block_start
= val
.low_cuid
;
7322 cse_basic_block_end
= val
.high_cuid
;
7325 /* Prevent TO from being deleted if it is a label. */
7326 if (to
!= 0 && GET_CODE (to
) == CODE_LABEL
)
7329 /* Back up so we process the first insn in the extension. */
7330 insn
= PREV_INSN (insn
);
/* NOTE(review): the body of this overflow check is not visible in this
   extraction -- TODO confirm against the pristine source.  */
7334 if (next_qty
> max_qty
)
7337 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7338 the previous insn is the only insn that branches to the head of a loop,
7339 we can cse into the loop. Don't do this if we changed the jump
7340 structure of a loop unless we aren't going to be following jumps. */
7342 if ((cse_jumps_altered
== 0
7343 || (flag_cse_follow_jumps
== 0 && flag_cse_skip_blocks
== 0))
7344 && around_loop
&& to
!= 0
7345 && GET_CODE (to
) == NOTE
&& NOTE_LINE_NUMBER (to
) == NOTE_INSN_LOOP_END
7346 && GET_CODE (PREV_INSN (to
)) == JUMP_INSN
7347 && JUMP_LABEL (PREV_INSN (to
)) != 0
7348 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to
))) == 1)
7349 cse_around_loop (JUMP_LABEL (PREV_INSN (to
)));
/* Return the insn following this block, or 0 if TO was the end.  */
7351 return to
? NEXT_INSN (to
) : 0;
/* NOTE(review): lossy extraction -- this function dispatches on the rtx
   code of X, but the switch statement and its case labels are missing
   from the visible text; the fragments below are the case bodies.  */
7354 /* Count the number of times registers are used (not set) in X.
7355 COUNTS is an array in which we accumulate the count, INCR is how much
7356 we count each register usage. */
7359 count_reg_usage (x
, counts
, incr
)
7364 enum rtx_code code
= GET_CODE (x
);
/* REG: bump the usage count for this register by INCR.  */
7371 counts
[REGNO (x
)] += incr
;
7385 /* Unless we are setting a REG, count everything in SET_DEST. */
7386 if (GET_CODE (SET_DEST (x
)) != REG
)
7387 count_reg_usage (SET_DEST (x
), counts
, incr
);
7388 count_reg_usage (SET_SRC (x
), counts
, incr
);
/* Insn case: count uses inside the pattern, then inside the notes.  */
7394 count_reg_usage (PATTERN (x
), counts
, incr
);
7396 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7400 count_reg_usage (REG_NOTES (x
), counts
, incr
);
/* EXPR_LIST of notes: count the note's value only for REG_EQUAL notes,
   then recurse down the rest of the note chain.  */
7405 if (REG_NOTE_KIND (x
) == REG_EQUAL
)
7406 count_reg_usage (XEXP (x
, 0), counts
, incr
);
7408 count_reg_usage (XEXP (x
, 1), counts
, incr
);
/* Default: recursively scan X's operands according to its rtx format
   string ('e' = subexpression, 'E' = vector of subexpressions).  */
7412 fmt
= GET_RTX_FORMAT (code
);
7413 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
7416 count_reg_usage (XEXP (x
, i
), counts
, incr
);
7417 else if (fmt
[i
] == 'E')
7418 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
7419 count_reg_usage (XVECEXP (x
, i
, j
), counts
, incr
);
7423 /* Scan all the insns and delete any that are dead; i.e., they store a register
7424 that is never used or they copy a register to itself.
7426 This is used to remove insns made obviously dead by cse. It improves the
7427 heuristics in loop since it won't try to move dead invariants out of loops
7428 or make givs for dead quantities. The remaining passes of the compilation
7429 are also sped up. */
7432 delete_dead_from_cse (insns
, nreg
)
7436 int *counts
= (int *) alloca (nreg
* sizeof (int));
7441 /* First count the number of times each register is used. */
7442 bzero (counts
, sizeof (int) * nreg
);
7443 for (insn
= next_real_insn (insns
); insn
; insn
= next_real_insn (insn
))
7444 count_reg_usage (insn
, counts
, 1);
7446 /* Go from the last insn to the first and delete insns that only set unused
7447 registers or copy a register to itself. As we delete an insn, remove
7448 usage counts for registers it uses. */
7449 for (insn
= prev_real_insn (get_last_insn ());
7450 insn
; insn
= prev_real_insn (insn
))
7454 if (GET_CODE (PATTERN (insn
)) == SET
)
7456 if (GET_CODE (SET_DEST (PATTERN (insn
))) == REG
7457 && SET_DEST (PATTERN (insn
)) == SET_SRC (PATTERN (insn
)))
7461 else if (GET_CODE (SET_DEST (PATTERN (insn
))) == CC0
7462 && ! side_effects_p (SET_SRC (PATTERN (insn
)))
7463 && ((tem
= next_nonnote_insn (insn
)) == 0
7464 || GET_RTX_CLASS (GET_CODE (tem
)) != 'i'
7465 || ! reg_referenced_p (cc0_rtx
, PATTERN (tem
))))
7468 else if (GET_CODE (SET_DEST (PATTERN (insn
))) != REG
7469 || REGNO (SET_DEST (PATTERN (insn
))) < FIRST_PSEUDO_REGISTER
7470 || counts
[REGNO (SET_DEST (PATTERN (insn
)))] != 0
7471 || side_effects_p (SET_SRC (PATTERN (insn
))))
7474 else if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
7475 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
7477 rtx elt
= XVECEXP (PATTERN (insn
), 0, i
);
7479 if (GET_CODE (elt
) == SET
)
7481 if (GET_CODE (SET_DEST (elt
)) == REG
7482 && SET_DEST (elt
) == SET_SRC (elt
))
7486 else if (GET_CODE (SET_DEST (elt
)) == CC0
7487 && ! side_effects_p (SET_SRC (elt
))
7488 && ((tem
= next_nonnote_insn (insn
)) == 0
7489 || GET_RTX_CLASS (GET_CODE (tem
)) != 'i'
7490 || ! reg_referenced_p (cc0_rtx
, PATTERN (tem
))))
7493 else if (GET_CODE (SET_DEST (elt
)) != REG
7494 || REGNO (SET_DEST (elt
)) < FIRST_PSEUDO_REGISTER
7495 || counts
[REGNO (SET_DEST (elt
))] != 0
7496 || side_effects_p (SET_SRC (elt
)))
7499 else if (GET_CODE (elt
) != CLOBBER
&& GET_CODE (elt
) != USE
)
7505 /* If this is a dead insn, delete it and show registers in it aren't
7506 being used. If this is the last insn of a libcall sequence, don't
7507 delete it even if it is dead because we don't know how to do so
7510 if (! live_insn
&& ! find_reg_note (insn
, REG_RETVAL
, 0))
7512 count_reg_usage (insn
, counts
, -1);
7513 PUT_CODE (insn
, NOTE
);
7514 NOTE_SOURCE_FILE (insn
) = 0;
7515 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;