/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.
   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.
   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.
   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.
   If two registers have the same quantity number, REG expressions with
   that quantity's `qty_mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.
   Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.
   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.
   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.
   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
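/* Illustrative sketch, not part of this file's logic: the two situations
   the header comment describes, written against the vectors declared below.
   `note_copy' and `note_store' are hypothetical names used only for this
   example; the real code paths are `make_regs_eqv' and `make_new_qty'.  */
#if 0
static void
note_copy (dest, src)
     int dest, src;
{
  /* A register-register copy shares SRC's quantity, so DEST and SRC
     are known equivalent from here until either is set again.  */
  reg_qty[dest] = reg_qty[src];
}

static void
note_store (dest)
     int dest;
{
  /* Any other store gives DEST a fresh quantity and bumps reg_tick so
     that stale hash-table references to DEST can be detected later.  */
  make_new_qty (dest);
  reg_tick[dest]++;
}
#endif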
/* One plus largest register number used in this function.  */

static int max_reg;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;
/* Indexed by quantity number, gives the first (or last) (pseudo) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */

static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;
/* Indexed by (pseudo) register number, gives the quantity number
   of the register's current contents.  */

static int *reg_qty;

/* Indexed by (pseudo) register number, gives the number of the next (or
   previous) (pseudo) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by (pseudo) register number, gives the number of times
   that register has been altered in the current basic block.  */

static int *reg_tick;

/* Indexed by (pseudo) register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table.  */

static int *reg_in_table;
/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* Two vectors of ints:
   one containing max_reg -1's; the other max_reg + 500 (an approximation
   for max_qty) elements where element i contains i.
   These are used to initialize various other vectors fast.  */

static int *all_minus_one;
static int *consec_ints;
/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */

static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
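/* Example of the cuid machinery (illustrative only): an insn lies in the
   block currently being processed exactly when its cuid falls between
   `cse_basic_block_start' and `cse_basic_block_end', which is how
   `make_regs_eqv' below judges whether a register outlives the block.  */
#if 0
  if (INSN_CUID (insn) >= cse_basic_block_start
      && INSN_CUID (insn) <= cse_basic_block_end)
    /* INSN is inside the current basic block.  */ ;
#endif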
/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;
/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a CONST_INT).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */
struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};
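/* Illustrative sketch, not part of the pass: because a same-value chain is
   kept in order of increasing cost, the first REG found on the chain is the
   cheapest register holding the value.  `cheapest_reg' is a hypothetical
   helper named only for this example.  */
#if 0
static struct table_elt *
cheapest_reg (elt)
     struct table_elt *elt;
{
  register struct table_elt *p;

  for (p = elt->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == REG)
      return p;
  return 0;
}
#endif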
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */

#define NBUCKETS 31
/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)						\
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? ((((int) REG << 7) + reg_qty[REGNO (X)]) % NBUCKETS)	\
  : canon_hash (X, M) % NBUCKETS)
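/* A consequence worth noting (illustrative): since pseudo registers hash
   through their quantity number rather than their register number, two
   pseudos that currently share a quantity land in the same bucket:

     reg_qty[REGNO (r1)] == reg_qty[REGNO (r2)]
       implies  HASH (r1, M) == HASH (r2, M)

   so a lookup canonicalized through either register finds the same chain.  */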
/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || fixed_regs[N]) \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || fixed_regs[N])
#endif
/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REG(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == STACK_POINTER_REGNUM	\
   || (N) == ARG_POINTER_REGNUM					\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER				\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
#define COST(X)							\
  (GET_CODE (X) == REG						\
   ? (CHEAP_REG (REGNO (X)) ? 0					\
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1			\
      : 2)							\
   : rtx_cost (X, SET) * 2)
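/* Worked examples of the resulting cost ordering (illustrative):

     COST (fixed hard reg, e.g. the frame pointer)  == 0
     COST (pseudo reg)                              == 1
     COST (other hard reg)                          == 2
     COST (anything else)                           == rtx_cost (X, SET) * 2

   so `insert' places a fixed reg ahead of a pseudo, and a pseudo ahead of
   an ordinary hard reg, in each same-value chain.  */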
/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
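/* Example (illustrative): right after `new_basic_block' runs, reg_qty[N]
   is N for every register, so REGNO_QTY_VALID_P is false everywhere.  It
   becomes true for N only once `make_new_qty' assigns N a real quantity,
   and real quantities are always >= max_reg.  */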
static struct table_elt *table[NBUCKETS];
/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
/* Bits describing what kind of values in memory must be invalidated
   for a particular instruction.  If all the bits are zero,
   no memory refs need to be invalidated.  Each bit is more powerful
   than the preceding ones, and if a bit is set then the preceding
   ones are also set.

   Here is how the bits are set:
   Pushing onto the stack invalidates only the stack pointer,
   writing at a fixed address invalidates only variable addresses,
   writing in a structure element at variable address
   invalidates all but scalar variables,
   and writing in anything else at variable address invalidates everything.  */
struct write_data
{
  int sp : 1;			/* Invalidate stack pointer.  */
  int var : 1;			/* Invalidate variable addresses.  */
  int nonscalar : 1;		/* Invalidate all but scalar variables.  */
  int all : 1;			/* Invalidate all memory refs.  */
};
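/* Example of how the bits escalate (illustrative): a push sets only `sp';
   a write at a fixed address sets `var' (and `sp'); a write into a
   structure element at a varying address sets `nonscalar' as well; and a
   write through an arbitrary pointer sets `all', which makes
   `invalidate_memory' drop every `in_memory' table entry.  */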
/* Define maximum length of a branch path.  */

#define PATHLENGTH	10
/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } path[PATHLENGTH];
};
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == arg_pointer_rtx		\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx)))
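/* Examples (illustrative): FIXED_BASE_PLUS_P accepts

     (reg frame_pointer)
     (plus (reg frame_pointer) (const_int -8))

   but rejects (plus (reg 100) (const_int -8)), since a pseudo base can be
   reassigned within the block and so does not name a fixed address.  */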
/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx					\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)))
static void new_basic_block	PROTO((void));
static void make_new_qty	PROTO((int));
static void make_regs_eqv	PROTO((int, int));
static void delete_reg_equiv	PROTO((int));
static int mention_regs	PROTO((rtx));
static int insert_regs	PROTO((rtx, struct table_elt *, int));
static void free_element	PROTO((struct table_elt *));
static void remove_from_table	PROTO((struct table_elt *, int));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup	PROTO((rtx, int, enum machine_mode)),
       *lookup_for_remove	PROTO((rtx, int, enum machine_mode));
static rtx lookup_as_function	PROTO((rtx, enum rtx_code));
static struct table_elt *insert	PROTO((rtx, struct table_elt *, int,
				       enum machine_mode));
static void merge_equiv_classes	PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate	PROTO((rtx));
static void remove_invalid_refs	PROTO((int));
static void rehash_using_reg	PROTO((rtx));
static void invalidate_memory	PROTO((struct write_data *));
static void invalidate_for_call	PROTO((void));
static rtx use_related_value	PROTO((rtx, struct table_elt *));
static int canon_hash	PROTO((rtx, enum machine_mode));
static int safe_hash	PROTO((rtx, enum machine_mode));
static int exp_equiv_p	PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p	PROTO((rtx, rtx));
static int refers_to_mem_p	PROTO((rtx, rtx, HOST_WIDE_INT,
				       HOST_WIDE_INT));
static int cse_rtx_addr_varies_p	PROTO((rtx));
static rtx canon_reg	PROTO((rtx, rtx));
static void find_best_addr	PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx simplify_plus_minus	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx fold_rtx	PROTO((rtx, rtx));
static rtx equiv_constant	PROTO((rtx));
static void record_jump_equiv	PROTO((rtx, int));
static void record_jump_cond	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx, int));
static void cse_insn	PROTO((rtx, int));
static void note_mem_written	PROTO((rtx, struct write_data *));
static void invalidate_from_clobbers	PROTO((struct write_data *, rtx));
static rtx cse_process_notes	PROTO((rtx, rtx));
static void cse_around_loop	PROTO((rtx));
static void invalidate_skipped_set	PROTO((rtx, rtx));
static void invalidate_skipped_block	PROTO((rtx));
static void cse_check_loop_start	PROTO((rtx, rtx));
static void cse_set_around_loop	PROTO((rtx, rtx, rtx));
static rtx cse_basic_block	PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage	PROTO((rtx, int *, int));
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
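/* Worked values of this formula (illustrative): COSTS_N_INSNS (1) == 2,
   the cost of a fast register-to-register insn; the defaults below then
   use COSTS_N_INSNS (5) == 18 for a general multiply and
   COSTS_N_INSNS (7) == 26 for a divide.  */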
int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;
  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (REGNO (x));

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
      CONST_COSTS (x, code, outer_code);
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
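/* Worked example (illustrative): for (plus:SI (reg 100) (const_int 4)) on
   a machine with no special PLUS cost, TOTAL starts on the default path
   and the loop above adds rtx_cost of each operand, so the expression
   costs more than either a plain register or a plain constant and will
   sit later in its same-value chain.  */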
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero (reg_tick, max_reg * sizeof (int));

  bcopy (all_minus_one, reg_in_table, max_reg * sizeof (int));
  bcopy (consec_ints, reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero (table, sizeof table);

  prev_insn = 0;
}
/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
		       || (uid_cuid[regno_first_uid[new]]
			   < cse_basic_block_start))
		      && (uid_cuid[regno_last_uid[new]]
			  > uid_cuid[regno_last_uid[firstr]]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS
		 || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int n = reg_next_eqv[reg];
  register int p = reg_prev_eqv[reg];
  register int q = reg_qty[reg];

  /* If invalid, do nothing.  N and P above are undefined in that case.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));

      for (i = regno; i < endregno; i++)
	{
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[reg_qty[regno]] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[reg_qty[regno]] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      mention_regs (SUBREG_REG (x));
      return 1;
    }
  else
    return mention_regs (x);
}
/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}
/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     int hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     int hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp,
					    GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     int hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, 0))
      return p->exp;

  return 0;
}
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y) ((X)->cost < (Y)->cost)
static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     int hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	if (p->is_const)
	  {
	    qty_const[reg_qty[REGNO (x)]]
	      = gen_lowpart_if_possible (GET_MODE (x), p->exp);
	    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	    break;
	  }
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      int subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    hash = HASH (exp, mode);
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x)
     rtx x;
{
  register int i;
  register struct table_elt *p;
  rtx base;
  HOST_WIDE_INT start, end;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register int hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
      else
	{
	  HOST_WIDE_INT in_table
	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      reg_tick[i]++;
	    }

	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x));
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x)),
				     &base, &start, &end);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  if (refers_to_mem_p (p->exp, base, start, end))
	    remove_from_table (p, i);
	}
    }
}
/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (regno)
     int regno;
{
  register int i;
  register struct table_elt *p, *next;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG
	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
	  remove_from_table (p, i);
      }
}
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (x)
     rtx x;
{
  int i;
  struct table_elt *p, *next;
  int hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (GET_CODE (x) != REG
      || reg_in_table[REGNO (x)] < 0
      || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  We can skip
     objects that are registers, since they are handled specially.  */

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, 0)
	    && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
	  {
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
/* Remove from the hash table all expressions that reference memory,
   or some of them as specified by *WRITES.  */

static void
invalidate_memory (writes)
     struct write_data *writes;
{
  register int i;
  register struct table_elt *p, *next;
  int all = writes->all;
  int nonscalar = writes->nonscalar;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (p->in_memory
	    && (all
		|| (nonscalar && p->in_struct)
		|| cse_rtx_addr_varies_p (p->exp)))
	  remove_from_table (p, i);
      }
}
/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update their TICK values.  */

static void
invalidate_for_call ()
{
  int regno, endregno;
  int i;
  int hash;
  struct table_elt *p, *next;
  int in_table = 0;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
      {
	delete_reg_equiv (regno);
	if (reg_tick[regno] >= 0)
	  reg_tick[regno]++;

	in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, regno);
      }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < NBUCKETS; hash++)
      for (p = table[hash]; p; p = next)
	{
	  next = p->next_same_hash;

	  if (GET_CODE (p->exp) != REG
	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
	    continue;

	  regno = REGNO (p->exp);
	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));

	  for (i = regno; i < endregno; i++)
	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	      {
		remove_from_table (p, hash);
		break;
	      }
	}
}
/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */

static rtx
use_related_value (x, elt)
     rtx x;
     struct table_elt *elt;
{
  register struct table_elt *relt = 0;
  register struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
	relt = lookup (subexp,
		       safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
		       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
	 The first is when X is already in the table.  Then it is searching
	 the RELATED_VALUE list of X's class (RELT).  The second case is when
	 X is not in the table.  Then RELT points to a class for the related
	 value.

	 Ensure that, whichever case we are in, we ignore classes that have
	 the same value as X.  */

      if (rtx_equal_p (x, p->exp))
	q = 0;
      else
	for (q = p->first_same_value; q; q = q->next_same_value)
	  if (GET_CODE (q->exp) == REG)
	    break;

      if (q)
	break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
	 Alternatively, perhaps RELT was in the table for some other reason
	 and it has no related values recorded.  */
      if (p == relt || p == 0)
	break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->exp, offset);
}
/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in do_not_record if any subexpression is volatile.

   Store 1 in hash_arg_in_memory if X contains a MEM rtx
   which does not have the RTX_UNCHANGING_P bit set.
   In this case, also store 1 in hash_arg_in_struct
   if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */
static int
canon_hash (x, mode)
     rtx x;
     enum machine_mode mode;
{
  register int i, j;
  register int hash = 0;
  register enum rtx_code code;
  register char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:
  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	register int regno = REGNO (x);

	/* On some machines, we can't record any non-fixed hard register,
	   because extending its life will cause reload problems.  We
	   consider ap, fp, and sp to be fixed for this purpose.
	   On all machines, we can't record any global registers.  */

	if (regno < FIRST_PSEUDO_REGISTER
	    && (global_regs[regno]
#ifdef SMALL_REGISTER_CLASSES
		|| (! fixed_regs[regno]
		    && regno != FRAME_POINTER_REGNUM
		    && regno != ARG_POINTER_REGNUM
		    && regno != STACK_POINTER_REGNUM)
#endif
		))
	  {
	    do_not_record = 1;
	    return 0;
	  }
	return hash + ((int) REG << 7) + reg_qty[regno];
      }

    case CONST_INT:
      hash += ((int) mode + ((int) CONST_INT << 7)
	       + INTVAL (x) + (INTVAL (x) >> HASHBITS));
      return ((1 << HASHBITS) - 1) & hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (int) code + (int) GET_MODE (x);
      for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	{
	  int tem = XINT (x, i);
	  hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
	}
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* Use `and' to ensure a positive number.  */
      return (hash + ((HOST_WIDE_INT) LABEL_REF << 7)
	      + ((HOST_WIDE_INT) XEXP (x, 0) & ((1 << HASHBITS) - 1)));

    case SYMBOL_REF:
      return (hash + ((HOST_WIDE_INT) SYMBOL_REF << 7)
	      + ((HOST_WIDE_INT) XEXP (x, 0) & ((1 << HASHBITS) - 1)));

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  do_not_record = 1;
	  return 0;
	}
      if (! RTX_UNCHANGING_P (x))
	{
	  hash_arg_in_memory = 1;
	  if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
	}
      /* Now that we have already found this special case,
	 might as well speed it up as much as possible.  */
      hash += (int) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case UNSPEC_VOLATILE:
      do_not_record = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  do_not_record = 1;
	  return 0;
	}
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (int) code + (int) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);
	  rtx tem1;

	  /* If the operand is a REG that is equivalent to a constant, hash
	     as if we were hashing the constant, since we will be comparing
	     that way.  */
	  if (tem != 0 && GET_CODE (tem) == REG
	      && REGNO_QTY_VALID_P (REGNO (tem))
	      && qty_mode[reg_qty[REGNO (tem)]] == GET_MODE (tem)
	      && (tem1 = qty_const[reg_qty[REGNO (tem)]]) != 0
	      && CONSTANT_P (tem1))
	    tem = tem1;

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }
	  hash += canon_hash (tem, 0);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  hash += canon_hash (XVECEXP (x, i, j), 0);
      else if (fmt[i] == 's')
	{
	  register char *p = XSTR (x, i);
	  if (p)
	    while (*p)
	      {
		register int tem = *p++;
		hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
	      }
	}
      else if (fmt[i] == 'i')
	{
	  register int tem = XINT (x, i);
	  hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
	}
    }
  return hash;
}
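
/* Illustrative sketch, not part of the original source: canon_hash folds
   each integer into the running hash with the idiom seen above, which
   mixes the bits beyond HASHBITS back into the low-order bits and masks
   the sum so the result stays nonnegative.  A standalone model of one
   folding step (guarded out of compilation):  */
#if 0
static int
example_hash_fold (hash, tem)
     int hash, tem;
{
  /* Add TEM's high bits into its low HASHBITS bits, mask, accumulate.  */
  return hash + (((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS)));
}
#endif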
/* Like canon_hash but with no side effects.  */

static int
safe_hash (x, mode)
     rtx x;
     enum machine_mode mode;
{
  int save_do_not_record = do_not_record;
  int save_hash_arg_in_memory = hash_arg_in_memory;
  int save_hash_arg_in_struct = hash_arg_in_struct;
  int hash = canon_hash (x, mode);
  hash_arg_in_memory = save_hash_arg_in_memory;
  hash_arg_in_struct = save_hash_arg_in_struct;
  do_not_record = save_do_not_record;
  return hash;
}
/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If EQUAL_VALUES is nonzero, we allow a register to match a constant value
   that is known to be in the register.  Ordinarily, we don't allow them
   to match, because letting them match would cause unpredictable results
   in all the places that search a hash table chain for an equivalent
   for a given value.  A possible equivalent that has different structure
   has its hash code computed from different data.  Whether the hash code
   is the same as that of the given value is pure luck.  */
static int
exp_equiv_p (x, y, validate, equal_values)
     rtx x, y;
     int validate;
     int equal_values;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;
  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    {
      if (!equal_values)
	return 0;

      /* If X is a constant and Y is a register or vice versa, they may be
	 equivalent.  We only have to validate if Y is a register.  */
      if (CONSTANT_P (x) && GET_CODE (y) == REG
	  && REGNO_QTY_VALID_P (REGNO (y))
	  && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
	  && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
	  && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
	return 1;

      if (CONSTANT_P (y) && code == REG
	  && REGNO_QTY_VALID_P (REGNO (x))
	  && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
	  && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
	return 1;

      return 0;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case REG:
      {
	int regno = REGNO (y);
	int endregno
	  = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		     : HARD_REGNO_NREGS (regno, GET_MODE (y)));
	int i;

	/* If the quantities are not the same, the expressions are not
	   equivalent.  If they are and we are not to validate, they
	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */

	if (reg_qty[REGNO (x)] != reg_qty[regno])
	  return 0;

	if (! validate)
	  return 1;

	for (i = regno; i < endregno; i++)
	  if (reg_in_table[i] != reg_tick[i])
	    return 0;

	return 1;
      }

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
			       validate, equal_values))
	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
			       validate, equal_values)
		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
				  validate, equal_values)));
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
			       validate, equal_values))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	  break;

	default:
	  abort ();
	}
    }
  return 1;
}
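
/* Usage note (sketch, not from the original source): this file checks
   whether a hash-table entry is still valid by comparing its expression
   against itself with VALIDATE nonzero, as find_best_addr does below:  */
#if 0
  if (GET_CODE (elt->exp) == REG
      || exp_equiv_p (elt->exp, elt->exp, 1, 0))
    /* All register references in ELT->exp are still up to date.  */ ;
#endif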
/* Return 1 iff any subexpression of X matches Y.
   Here we do not require that X or Y be valid (for registers referred to)
   for being in the hash table.  */

static int
refers_to_p (x, y)
     rtx x, y;
{
  register int i;
  register enum rtx_code code;
  register char *fmt;

 repeat:
  if (x == y)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  code = GET_CODE (x);
  /* If X as a whole has the same code as Y, they may match.
     If so, return 1.  */
  if (code == GET_CODE (y))
    {
      if (exp_equiv_p (x, y, 0, 1))
	return 1;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	if (i == 0)
	  {
	    x = XEXP (x, 0);
	    goto repeat;
	  }
	else if (refers_to_p (XEXP (x, i), y))
	  return 1;
      }
    else if (fmt[i] == 'E')
      {
	int j;
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (refers_to_p (XVECEXP (x, i, j), y))
	    return 1;
      }

  return 0;
}
/* Given ADDR and SIZE (a memory address, and the size of the memory reference),
   set PBASE, PSTART, and PEND which correspond to the base of the address,
   the starting offset, and ending offset respectively.

   ADDR is known to be a nonvarying address.

   cse_address_varies_p returns zero for nonvarying addresses.  */
static void
set_nonvarying_address_components (addr, size, pbase, pstart, pend)
     rtx addr;
     int size;
     rtx *pbase;
     HOST_WIDE_INT *pstart, *pend;
{
  rtx base;
  HOST_WIDE_INT start, end;

  base = addr;
  start = 0;
  end = 0;

  /* Registers with nonvarying addresses usually have constant equivalents;
     but the frame pointer register is also possible.  */
  if (GET_CODE (base) == REG
      && REGNO_QTY_VALID_P (REGNO (base))
      && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
      && qty_const[reg_qty[REGNO (base)]] != 0)
    base = qty_const[reg_qty[REGNO (base)]];
  else if (GET_CODE (base) == PLUS
	   && GET_CODE (XEXP (base, 1)) == CONST_INT
	   && GET_CODE (XEXP (base, 0)) == REG
	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
	   && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
	       == GET_MODE (XEXP (base, 0)))
	   && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
    {
      start = INTVAL (XEXP (base, 1));
      base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
    }

  /* By definition, operand1 of a LO_SUM is the associated constant
     address.  Use the associated constant address as the base instead.  */
  if (GET_CODE (base) == LO_SUM)
    base = XEXP (base, 1);

  /* Strip off CONST.  */
  if (GET_CODE (base) == CONST)
    base = XEXP (base, 0);

  if (GET_CODE (base) == PLUS
      && GET_CODE (XEXP (base, 1)) == CONST_INT)
    {
      start += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);
    }

  end = start + size;

  /* Set the return values.  */
  *pbase = base;
  *pstart = start;
  *pend = end;
}
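
/* Worked example (sketch, not from the original source): for a 4-byte
   reference at (plus (symbol_ref "x") (const_int 8)), the components
   come back as BASE = (symbol_ref "x"), START = 8, END = 12:  */
#if 0
  rtx base;
  HOST_WIDE_INT start, end;
  set_nonvarying_address_components (addr, 4, &base, &start, &end);
#endif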
/* Return 1 iff any subexpression of X refers to memory
   at an address of BASE plus some offset
   such that any of the bytes' offsets fall between START (inclusive)
   and END (exclusive).

   The value is undefined if X is a varying address (as determined by
   cse_rtx_addr_varies_p).  This function is not used in such cases.

   When used in the cse pass, `qty_const' is nonzero, and it is used
   to treat an address that is a register with a known constant value
   as if it were that constant value.
   In the loop pass, `qty_const' is zero, so this is not done.  */
static int
refers_to_mem_p (x, base, start, end)
     rtx x, base;
     HOST_WIDE_INT start, end;
{
  register HOST_WIDE_INT i;
  register enum rtx_code code;
  register char *fmt;

  if (GET_CODE (base) == CONST_INT)
    {
      start += INTVAL (base);
      end += INTVAL (base);
      base = const0_rtx;
    }

 repeat:
  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    {
      register rtx addr = XEXP (x, 0);	/* Get the address.  */
      rtx mybase;
      HOST_WIDE_INT mystart, myend;

      set_nonvarying_address_components (addr, GET_MODE_SIZE (GET_MODE (x)),
					 &mybase, &mystart, &myend);

      /* refers_to_mem_p is never called with varying addresses.
	 If the base addresses are not equal, there is no chance
	 of the memory addresses conflicting.  */
      if (! rtx_equal_p (mybase, base))
	return 0;

      return myend > start && mystart < end;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	if (i == 0)
	  {
	    x = XEXP (x, 0);
	    goto repeat;
	  }
	else if (refers_to_mem_p (XEXP (x, i), base, start, end))
	  return 1;
      }
    else if (fmt[i] == 'E')
      {
	int j;
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
	    return 1;
      }

  return 0;
}
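
/* Note (sketch, not from the original source): once both references are
   reduced to a common BASE, the MEM case above is the standard half-open
   interval intersection test:  */
#if 0
  int overlap = myend > start && mystart < end;
#endif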
/* Nonzero if X refers to memory at a varying address;
   except that a register which has at the moment a known constant value
   isn't considered variable.  */

static int
cse_rtx_addr_varies_p (x)
     rtx x;
{
  /* We need not check for X and the equivalence class being of the same
     mode because if X is equivalent to a constant in some mode, it
     doesn't vary in any mode.  */

  if (GET_CODE (x) == MEM
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
      && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
      && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
    return 0;

  if (GET_CODE (x) == MEM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
      && (GET_MODE (XEXP (XEXP (x, 0), 0))
	  == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
      && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
    return 0;

  return rtx_addr_varies_p (x);
}
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is non-zero and we are replacing a pseudo with a hard register
   or vice versa, validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP non-zero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */
static rtx
canon_reg (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register enum rtx_code code;
  register char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
	register int first;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	first = qty_first_reg[reg_qty[REGNO (x)]];
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
      }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      register int j;

      if (fmt[i] == 'e')
	{
	  rtx new = canon_reg (XEXP (x, i), insn);

	  /* If replacing pseudo with hard reg or vice versa, ensure the
	     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
	  if (insn != 0 && new != 0
	      && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
	      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
		   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
		  || insn_n_dups[recog_memoized (insn)] > 0))
	    validate_change (insn, &XEXP (x, i), new, 1);
	  else
	    XEXP (x, i) = new;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
    }

  return x;
}
/* LOC is a location within INSN that is an operand address (the contents of
   a MEM).  Find the best equivalent address to use that is valid for this
   insn.

   On most CISC machines, complicated address modes are costly, and rtx_cost
   is a good approximation for that cost.  However, most RISC machines have
   only a few (usually only one) memory reference formats.  If an address is
   valid at all, it is often just as cheap as any other address.  Hence, for
   RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
   costs of various addresses.  For two addresses of equal cost, choose the one
   with the highest `rtx_cost' value as that has the potential of eliminating
   the most insns.  For equal costs, we choose the first in the equivalence
   class.  Note that we ignore the fact that pseudo registers are cheaper
   than hard registers here because we would also prefer the pseudo
   registers.  */
static void
find_best_addr (insn, loc)
     rtx insn;
     rtx *loc;
{
  struct table_elt *elt, *p;
  rtx addr = *loc;
  int found_better = 1;
  int save_do_not_record = do_not_record;
  int save_hash_arg_in_memory = hash_arg_in_memory;
  int save_hash_arg_in_struct = hash_arg_in_struct;
  int hash_code;
  int addr_volatile;
  int regno;

  /* Do not try to replace constant addresses or addresses of local and
     argument slots.  These MEM expressions are made only once and inserted
     in many instructions, as well as being used to control symbol table
     output.  It is not safe to clobber them.

     There are some uncommon cases where the address is already in a register
     for some reason, but we cannot take advantage of that because we have
     no easy way to unshare the MEM.  In addition, looking up all stack
     addresses is costly.  */
  if ((GET_CODE (addr) == PLUS
       && GET_CODE (XEXP (addr, 0)) == REG
       && GET_CODE (XEXP (addr, 1)) == CONST_INT
       && (regno = REGNO (XEXP (addr, 0)),
	   regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
      || (GET_CODE (addr) == REG
	  && (regno = REGNO (addr),
	      regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
      || CONSTANT_ADDRESS_P (addr))
    return;

  /* If this address is not simply a register, try to fold it.  This will
     sometimes simplify the expression.  Many simplifications
     will not be valid, but some, usually applying the associative rule, will
     be valid and produce better code.  */
  if (GET_CODE (addr) != REG
      && validate_change (insn, loc, fold_rtx (addr, insn), 0))
    addr = *loc;

  /* If this address is not in the hash table, we can't look for equivalences
     of the whole address.  Also, ignore if volatile.  */

  do_not_record = 0;
  hash_code = HASH (addr, Pmode);
  addr_volatile = do_not_record;
  do_not_record = save_do_not_record;
  hash_arg_in_memory = save_hash_arg_in_memory;
  hash_arg_in_struct = save_hash_arg_in_struct;

  if (addr_volatile)
    return;

  elt = lookup (addr, hash_code, Pmode);

#ifndef ADDRESS_COST
  if (elt)
    {
      int our_cost = elt->cost;

      /* Find the lowest cost below ours that works.  */
      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
	if (elt->cost < our_cost
	    && (GET_CODE (elt->exp) == REG
		|| exp_equiv_p (elt->exp, elt->exp, 1, 0))
	    && validate_change (insn, loc,
				canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
	  return;
    }
#else

  if (elt)
    {
      /* We need to find the best (under the criteria documented above) entry
	 in the class that is valid.  We use the `flag' field to indicate
	 choices that were invalid and iterate until we can't find a better
	 one that hasn't already been tried.  */

      for (p = elt->first_same_value; p; p = p->next_same_value)
	p->flag = 0;

      while (found_better)
	{
	  int best_addr_cost = ADDRESS_COST (*loc);
	  int best_rtx_cost = (elt->cost + 1) >> 1;
	  struct table_elt *best_elt = elt;

	  found_better = 0;
	  for (p = elt->first_same_value; p; p = p->next_same_value)
	    if (! p->flag
		&& (GET_CODE (p->exp) == REG
		    || exp_equiv_p (p->exp, p->exp, 1, 0))
		&& (ADDRESS_COST (p->exp) < best_addr_cost
		    || (ADDRESS_COST (p->exp) == best_addr_cost
			&& (p->cost + 1) >> 1 > best_rtx_cost)))
	      {
		found_better = 1;
		best_addr_cost = ADDRESS_COST (p->exp);
		best_rtx_cost = (p->cost + 1) >> 1;
		best_elt = p;
	      }

	  if (found_better)
	    {
	      if (validate_change (insn, loc,
				   canon_reg (copy_rtx (best_elt->exp),
					      NULL_RTX), 0))
		return;
	      else
		best_elt->flag = 1;
	    }
	}
    }

  /* If the address is a binary operation with the first operand a register
     and the second a constant, do the same as above, but looking for
     equivalences of the register.  Then try to simplify before checking for
     the best address to use.  This catches a few cases:  First is when we
     have REG+const and the register is another REG+const.  We can often merge
     the constants and eliminate one insn and one register.  It may also be
     that a machine has a cheap REG+REG+const.  Finally, this improves the
     code on the Alpha for unaligned byte stores.  */

  if (flag_expensive_optimizations
      && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
	  || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
      && GET_CODE (XEXP (*loc, 0)) == REG
      && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
    {
      rtx c = XEXP (*loc, 1);

      do_not_record = 0;
      hash_code = HASH (XEXP (*loc, 0), Pmode);
      do_not_record = save_do_not_record;
      hash_arg_in_memory = save_hash_arg_in_memory;
      hash_arg_in_struct = save_hash_arg_in_struct;

      elt = lookup (XEXP (*loc, 0), hash_code, Pmode);
      if (elt == 0)
	return;

      /* We need to find the best (under the criteria documented above) entry
	 in the class that is valid.  We use the `flag' field to indicate
	 choices that were invalid and iterate until we can't find a better
	 one that hasn't already been tried.  */

      for (p = elt->first_same_value; p; p = p->next_same_value)
	p->flag = 0;

      while (found_better)
	{
	  int best_addr_cost = ADDRESS_COST (*loc);
	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
	  struct table_elt *best_elt = elt;
	  rtx best_rtx = *loc;

	  found_better = 0;
	  for (p = elt->first_same_value; p; p = p->next_same_value)
	    if (! p->flag
		&& (GET_CODE (p->exp) == REG
		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
	      {
		rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);

		if ((ADDRESS_COST (new) < best_addr_cost
		     || (ADDRESS_COST (new) == best_addr_cost
			 && (COST (new) + 1) >> 1 > best_rtx_cost)))
		  {
		    found_better = 1;
		    best_addr_cost = ADDRESS_COST (new);
		    best_rtx_cost = (COST (new) + 1) >> 1;
		    best_elt = p;
		    best_rtx = new;
		  }
	      }

	  if (found_better)
	    {
	      if (validate_change (insn, loc,
				   canon_reg (copy_rtx (best_rtx),
					      NULL_RTX), 0))
		return;
	      else
		best_elt->flag = 1;
	    }
	}
    }
#endif
}
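
/* Sketch of the selection rule used twice above (not from the original
   source): candidate P displaces the current best when its address cost
   is strictly lower, or ties on address cost with a larger rtx cost:  */
#if 0
  int better = (ADDRESS_COST (p->exp) < best_addr_cost
		|| (ADDRESS_COST (p->exp) == best_addr_cost
		    && (p->cost + 1) >> 1 > best_rtx_cost));
#endif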
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table to
   find what values are really being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   The return value is the comparison operator and is either CODE
   or the code corresponding to the inverse of the comparison.  */
static enum rtx_code
find_comparison_args (code, parg1, parg2, pmode1, pmode2)
     enum rtx_code code;
     rtx *parg1, *parg2;
     enum machine_mode *pmode1, *pmode2;
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set non-zero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
	 On machines with CC0, this is the only case that can occur, since
	 fold_rtx will return the COMPARE or item being compared with zero
	 when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
	{
	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		  && FLOAT_STORE_FLAG_VALUE < 0)
#endif
	      )
	    x = arg1;
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
		       && FLOAT_STORE_FLAG_VALUE < 0)
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
		    GET_MODE (arg1));
      if (p) p = p->first_same_value;

      for (; p; p = p->next_same_value)
	{
	  enum machine_mode inner_mode = GET_MODE (p->exp);

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machines with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_INT
		       && (GET_MODE_BITSIZE (inner_mode)
			   <= HOST_BITS_PER_WIDE_INT)
		       && (STORE_FLAG_VALUE
			   & ((HOST_WIDE_INT) 1
			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (code == LT
		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
		       && FLOAT_STORE_FLAG_VALUE < 0)
#endif
		   )
		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_INT
			&& (GET_MODE_BITSIZE (inner_mode)
			    <= HOST_BITS_PER_WIDE_INT)
			&& (STORE_FLAG_VALUE
			    & ((HOST_WIDE_INT) 1
			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == GE
			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
			&& FLOAT_STORE_FLAG_VALUE < 0)
#endif
		    )
		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is fp + constant, the equivalent is a better operand since
	     it may let us predict the value of the comparison.  */
	  else if (NONZERO_BASE_PLUS_P (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
      if (GET_RTX_CLASS (GET_CODE (x)) == '<')
	code = GET_CODE (x);

      if (reverse_code)
	code = reverse_condition (code);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}
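
/* Usage sketch (hypothetical operands, not from the original source):  */
#if 0
  rtx arg1 = cc0_rtx, arg2 = const0_rtx;
  enum machine_mode mode1, mode2;
  enum rtx_code code
    = find_comparison_args (EQ, &arg1, &arg2, &mode1, &mode2);
  /* ARG1/ARG2 now name the values whose comparison set cc0, and CODE
     is EQ or its inverse depending on how the flag value was produced.  */
#endif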
/* Try to simplify a unary operation CODE whose output mode is to be
   MODE with input operand OP whose mode was originally OP_MODE.
   Return zero if no simplification can be made.  */
rtx
simplify_unary_operation (code, mode, op, op_mode)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op;
     enum machine_mode op_mode;
{
  register int width = GET_MODE_BITSIZE (mode);

  /* The order of these tests is critical so that, for example, we don't
     check the wrong mode (input vs. output) for a conversion operation,
     such as FIX.  At some point, this should be simplified.  */

#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (code == FLOAT && GET_CODE (op) == CONST_INT)
    {
      REAL_VALUE_TYPE d;

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_INT (d, INTVAL (op), INTVAL (op) < 0 ? ~0 : 0);
#else
      d = (double) INTVAL (op);
#endif
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
  else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_INT)
    {
      REAL_VALUE_TYPE d;

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_INT (d, INTVAL (op), 0);
#else
      d = (double) (unsigned int) INTVAL (op);
#endif
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
  else if (code == FLOAT && GET_CODE (op) == CONST_DOUBLE
	   && GET_MODE (op) == VOIDmode)
    {
      REAL_VALUE_TYPE d;

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_INT (d, CONST_DOUBLE_LOW (op), CONST_DOUBLE_HIGH (op));
#else
      if (CONST_DOUBLE_HIGH (op) < 0)
	{
	  d = (double) (~ CONST_DOUBLE_HIGH (op));
	  d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
		* (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
	  d += (double) (unsigned HOST_WIDE_INT) (~ CONST_DOUBLE_LOW (op));
	  d = (- d - 1.0);
	}
      else
	{
	  d = (double) CONST_DOUBLE_HIGH (op);
	  d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
		* (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
	  d += (double) (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
	}
#endif  /* REAL_ARITHMETIC */
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
  else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_DOUBLE
	   && GET_MODE (op) == VOIDmode)
    {
      REAL_VALUE_TYPE d;

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_UNSIGNED_INT (d, CONST_DOUBLE_LOW (op),
				    CONST_DOUBLE_HIGH (op));
#else
      d = (double) CONST_DOUBLE_HIGH (op);
      d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
	    * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
      d += (double) (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
#endif  /* REAL_ARITHMETIC */
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
#endif
  if (GET_CODE (op) == CONST_INT
      && width <= HOST_BITS_PER_WIDE_INT && width > 0)
    {
      register HOST_WIDE_INT arg0 = INTVAL (op);
      register HOST_WIDE_INT val;

      switch (code)
	{
	case NOT:
	  val = ~ arg0;
	  break;

	case NEG:
	  val = - arg0;
	  break;

	case ABS:
	  val = (arg0 >= 0 ? arg0 : - arg0);
	  break;

	case FFS:
	  /* Don't use ffs here.  Instead, get low order bit and then its
	     number.  If arg0 is zero, this will return 0, as desired.  */
	  arg0 &= GET_MODE_MASK (mode);
	  val = exact_log2 (arg0 & (- arg0)) + 1;
	  break;

	case TRUNCATE:
	  val = arg0;
	  break;

	case ZERO_EXTEND:
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      if (width != GET_MODE_BITSIZE (op_mode))
		abort ();
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
	  else
	    return 0;
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
	    {
	      /* If we were really extending the mode,
		 we would have to distinguish between zero-extension
		 and sign-extension.  */
	      if (width != GET_MODE_BITSIZE (op_mode))
		abort ();
	      val = arg0;
	    }
	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
	    {
	      val
		= arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
	      if (val
		  & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
		val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
	    }
	  else
	    return 0;
	  break;

	default:
	  return 0;
	}

      /* Clear the bits that don't belong in our mode,
	 unless they and our sign bit are all one.
	 So we get either a reasonable negative value or a reasonable
	 unsigned value for this mode.  */
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	val &= (1 << width) - 1;

      return GEN_INT (val);
    }
  /* We can do some operations on integer CONST_DOUBLEs.  Also allow
     for a DImode operation on a CONST_INT.  */
  else if (GET_MODE (op) == VOIDmode
	   && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
    {
      HOST_WIDE_INT l1, h1, lv, hv;

      if (GET_CODE (op) == CONST_DOUBLE)
	l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
      else
	l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;

      switch (code)
	{
	case NOT:
	  lv = ~ l1, hv = ~ h1;
	  break;

	case NEG:
	  neg_double (l1, h1, &lv, &hv);
	  break;

	case ABS:
	  if (h1 < 0)
	    neg_double (l1, h1, &lv, &hv);
	  else
	    lv = l1, hv = h1;
	  break;

	case FFS:
	  hv = 0;
	  if (l1 == 0)
	    lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
	  else
	    lv = exact_log2 (l1 & (-l1)) + 1;
	  break;

	case TRUNCATE:
	  if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	    return GEN_INT (l1 & GET_MODE_MASK (mode));
	  else
	    return 0;
	  break;

	case ZERO_EXTEND:
	  if (op_mode == VOIDmode
	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
	    return 0;

	  hv = 0;
	  lv = l1 & GET_MODE_MASK (op_mode);
	  break;

	case SIGN_EXTEND:
	  if (op_mode == VOIDmode
	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
	    return 0;
	  else
	    {
	      lv = l1 & GET_MODE_MASK (op_mode);
	      if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
		  && (lv & ((HOST_WIDE_INT) 1
			    << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
		lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);

	      hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
	    }
	  break;

	default:
	  return 0;
	}

      return immed_double_const (lv, hv, mode);
    }
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  else if (GET_CODE (op) == CONST_DOUBLE
	   && GET_MODE_CLASS (mode) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE d;
      jmp_buf handler;
      rtx x;

      if (setjmp (handler))
	/* There used to be a warning here, but that is inadvisable.
	   People may want to cause traps, and the natural way
	   to do it should not get a warning.  */
	return 0;

      set_float_handler (handler);

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      switch (code)
	{
	case NEG:
	  d = REAL_VALUE_NEGATE (d);
	  break;

	case ABS:
	  if (REAL_VALUE_NEGATIVE (d))
	    d = REAL_VALUE_NEGATE (d);
	  break;

	case FLOAT_TRUNCATE:
	  d = real_value_truncate (mode, d);
	  break;

	case FLOAT_EXTEND:
	  /* All this does is change the mode.  */
	  break;

	case FIX:
	  d = REAL_VALUE_RNDZINT (d);
	  break;

	case UNSIGNED_FIX:
	  d = REAL_VALUE_UNSIGNED_RNDZINT (d);
	  break;

	default:
	  return 0;
	}

      x = immed_real_const_1 (d, mode);
      set_float_handler (NULL_PTR);
      return x;
    }
  else if (GET_CODE (op) == CONST_DOUBLE && GET_MODE_CLASS (mode) == MODE_INT
	   && width <= HOST_BITS_PER_WIDE_INT && width > 0)
    {
      REAL_VALUE_TYPE d;
      jmp_buf handler;
      HOST_WIDE_INT val;

      if (setjmp (handler))
	return 0;

      set_float_handler (handler);

      REAL_VALUE_FROM_CONST_DOUBLE (d, op);

      switch (code)
	{
	case FIX:
	  val = REAL_VALUE_FIX (d);
	  break;

	case UNSIGNED_FIX:
	  val = REAL_VALUE_UNSIGNED_FIX (d);
	  break;

	default:
	  return 0;
	}

      set_float_handler (NULL_PTR);

      /* Clear the bits that don't belong in our mode,
	 unless they and our sign bit are all one.
	 So we get either a reasonable negative value or a reasonable
	 unsigned value for this mode.  */
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return GEN_INT (val);
    }
#endif
  /* This was formerly used only for non-IEEE float.
     eggert@twinsun.com says it is safe for IEEE also.  */
  else
    {
      /* There are some simplifications we can do even if the operands
	 aren't constant.  */
      switch (code)
	{
	case NEG:
	case NOT:
	  /* (not (not X)) == X, similarly for NEG.  */
	  if (GET_CODE (op) == code)
	    return XEXP (op, 0);
	  break;

	case SIGN_EXTEND:
	  /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
	     becomes just the MINUS if its mode is MODE.  This allows
	     folding switch statements on machines using casesi (such as
	     the Vax).  */
	  if (GET_CODE (op) == TRUNCATE
	      && GET_MODE (XEXP (op, 0)) == mode
	      && GET_CODE (XEXP (op, 0)) == MINUS
	      && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
	      && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
	    return XEXP (op, 0);
	  break;
	}

      return 0;
    }
}
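
/* Usage sketch (not from the original source): constant operands fold
   to constants, e.g. (neg:SI (const_int 9)) becomes (const_int -9);
   a zero return means no simplification was found.  */
#if 0
  rtx tem = simplify_unary_operation (NEG, SImode, GEN_INT (9), SImode);
#endif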
/* Simplify a binary operation CODE with result mode MODE, operating on OP0
   and OP1.  Return 0 if no simplification is possible.

   Don't use this for relational operations such as EQ or LT.
   Use simplify_relational_operation instead.  */
rtx
simplify_binary_operation (code, mode, op0, op1)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
{
  register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
  HOST_WIDE_INT val;
  int width = GET_MODE_BITSIZE (mode);
  rtx tem;

  /* Relational operations don't work here.  We must know the mode
     of the operands in order to do the comparison correctly.
     Assuming a full word can give incorrect results.
     Consider comparing 128 with -128 in QImode.  */

  if (GET_RTX_CLASS (code) == '<')
    abort ();

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
      && mode == GET_MODE (op0) && mode == GET_MODE (op1))
    {
      REAL_VALUE_TYPE f0, f1, value;
      jmp_buf handler;

      if (setjmp (handler))
	return 0;

      set_float_handler (handler);

      REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
      REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
      f0 = real_value_truncate (mode, f0);
      f1 = real_value_truncate (mode, f1);

#ifdef REAL_ARITHMETIC
      REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
#else
      switch (code)
	{
	case PLUS:
	  value = f0 + f1;
	  break;
	case MINUS:
	  value = f0 - f1;
	  break;
	case MULT:
	  value = f0 * f1;
	  break;
	case DIV:
#ifndef REAL_INFINITY
	  if (f1 == 0)
	    return 0;
#endif
	  value = f0 / f1;
	  break;
	case SMIN:
	  value = MIN (f0, f1);
	  break;
	case SMAX:
	  value = MAX (f0, f1);
	  break;
	default:
	  abort ();
	}
#endif

      set_float_handler (NULL_PTR);
      value = real_value_truncate (mode, value);
      return immed_real_const_1 (value, mode);
    }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  /* We can fold some multi-word operations.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_CODE (op0) == CONST_DOUBLE
      && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
    {
      HOST_WIDE_INT l1, l2, h1, h2, lv, hv;

      l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);

      if (GET_CODE (op1) == CONST_DOUBLE)
	l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
      else
	l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;

      switch (code)
	{
	case MINUS:
	  /* A - B == A + (-B).  */
	  neg_double (l2, h2, &lv, &hv);
	  l2 = lv, h2 = hv;

	  /* .. fall through ...  */

	case PLUS:
	  add_double (l1, h1, l2, h2, &lv, &hv);
	  break;

	case MULT:
	  mul_double (l1, h1, l2, h2, &lv, &hv);
	  break;

	case DIV:  case MOD:   case UDIV:  case UMOD:
	  /* We'd need to include tree.h to do this and it doesn't seem worth
	     it.  */
	  return 0;

	case AND:
	  lv = l1 & l2, hv = h1 & h2;
	  break;

	case IOR:
	  lv = l1 | l2, hv = h1 | h2;
	  break;

	case XOR:
	  lv = l1 ^ l2, hv = h1 ^ h2;
	  break;

	case SMIN:
	  if (h1 < h2
	      || (h1 == h2
		  && ((unsigned HOST_WIDE_INT) l1
		      < (unsigned HOST_WIDE_INT) l2)))
	    lv = l1, hv = h1;
	  else
	    lv = l2, hv = h2;
	  break;

	case SMAX:
	  if (h1 > h2
	      || (h1 == h2
		  && ((unsigned HOST_WIDE_INT) l1
		      > (unsigned HOST_WIDE_INT) l2)))
	    lv = l1, hv = h1;
	  else
	    lv = l2, hv = h2;
	  break;

	case UMIN:
	  if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
	      || (h1 == h2
		  && ((unsigned HOST_WIDE_INT) l1
		      < (unsigned HOST_WIDE_INT) l2)))
	    lv = l1, hv = h1;
	  else
	    lv = l2, hv = h2;
	  break;

	case UMAX:
	  if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
	      || (h1 == h2
		  && ((unsigned HOST_WIDE_INT) l1
		      > (unsigned HOST_WIDE_INT) l2)))
	    lv = l1, hv = h1;
	  else
	    lv = l2, hv = h2;
	  break;

	case LSHIFTRT:   case ASHIFTRT:
	case ASHIFT:     case LSHIFT:
	case ROTATE:     case ROTATERT:
#ifdef SHIFT_COUNT_TRUNCATED
	  l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
#endif

	  if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
	    return 0;

	  if (code == LSHIFTRT || code == ASHIFTRT)
	    rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
			   code == ASHIFTRT);
	  else if (code == ASHIFT || code == LSHIFT)
	    lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
			   code == ASHIFT);
	  else if (code == ROTATE)
	    lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
	  else /* code == ROTATERT */
	    rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
	  break;

	default:
	  return 0;
	}

      return immed_double_const (lv, hv, mode);
    }
  if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
      || width > HOST_BITS_PER_WIDE_INT || width == 0)
    {
      /* Even if we can't compute a constant result,
	 there are some cases worth simplifying.  */

      switch (code)
	{
	case PLUS:
	  /* In IEEE floating point, x+0 is not the same as x.  Similarly
	     for the other optimizations below.  */
	  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
	      && GET_MODE_CLASS (mode) != MODE_INT)
	    break;

	  if (op1 == CONST0_RTX (mode))
	    return op0;

	  /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
	  if (GET_CODE (op0) == NEG)
	    return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
	  else if (GET_CODE (op1) == NEG)
	    return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));

	  /* Handle both-operands-constant cases.  We can only add
	     CONST_INTs to constants since the sum of relocatable symbols
	     can't be handled by most assemblers.  */

	  if (CONSTANT_P (op0) && GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, INTVAL (op1));
	  else if (CONSTANT_P (op1) && GET_CODE (op0) == CONST_INT)
	    return plus_constant (op1, INTVAL (op0));

	  /* If one of the operands is a PLUS or a MINUS, see if we can
	     simplify this by the associative law.
	     Don't use the associative law for floating point.
	     The inaccuracy makes it nonassociative,
	     and subtle programs can break if operations are associated.  */

	  if ((GET_MODE_CLASS (mode) == MODE_INT
	       || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	      && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
		  || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
	      && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
	    return tem;
	  break;
	case COMPARE:
#ifdef HAVE_cc0
	  /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
	     using cc0, in which case we want to leave it as a COMPARE
	     so we can distinguish it from a register-register-copy.

	     In IEEE floating point, x-0 is not the same as x.  */

	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
	       || GET_MODE_CLASS (mode) == MODE_INT)
	      && op1 == CONST0_RTX (mode))
	    return op0;
#else
	  /* Do nothing here.  */
#endif
	  break;

	case MINUS:
	  /* None of these optimizations can be done for IEEE
	     floating point.  */
	  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
	      && GET_MODE_CLASS (mode) != MODE_INT
	      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	    break;

	  /* We can't assume x-x is 0 even with non-IEEE floating point.  */
	  if (rtx_equal_p (op0, op1)
	      && ! side_effects_p (op0)
	      && GET_MODE_CLASS (mode) != MODE_FLOAT
	      && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
	    return const0_rtx;

	  /* Change subtraction from zero into negation.  */
	  if (op0 == CONST0_RTX (mode))
	    return gen_rtx (NEG, mode, op1);

	  /* (-1 - a) is ~a.  */
	  if (op0 == constm1_rtx)
	    return gen_rtx (NOT, mode, op1);

	  /* Subtracting 0 has no effect.  */
	  if (op1 == CONST0_RTX (mode))
	    return op0;

	  /* (a - (-b)) -> (a + b).  */
	  if (GET_CODE (op1) == NEG)
	    return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));

	  /* If one of the operands is a PLUS or a MINUS, see if we can
	     simplify this by the associative law.
	     Don't use the associative law for floating point.
	     The inaccuracy makes it nonassociative,
	     and subtle programs can break if operations are associated.  */

	  if ((GET_MODE_CLASS (mode) == MODE_INT
	       || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	      && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
		  || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
	      && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
	    return tem;

	  /* Don't let a relocatable value get a negative coeff.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  break;
	case MULT:
	  if (op1 == constm1_rtx)
	    {
	      tem = simplify_unary_operation (NEG, mode, op0, mode);

	      return tem ? tem : gen_rtx (NEG, mode, op0);
	    }

	  /* In IEEE floating point, x*0 is not always 0.  */
	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
	       || GET_MODE_CLASS (mode) == MODE_INT)
	      && op1 == CONST0_RTX (mode)
	      && ! side_effects_p (op0))
	    return op1;

	  /* In IEEE floating point, x*1 is not equivalent to x for nans.
	     However, ANSI says we can drop signals,
	     so we can do this anyway.  */
	  if (op1 == CONST1_RTX (mode))
	    return op0;

	  /* Convert multiply by constant power of two into shift.  */
	  if (GET_CODE (op1) == CONST_INT
	      && (val = exact_log2 (INTVAL (op1))) >= 0)
	    return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));

	  if (GET_CODE (op1) == CONST_DOUBLE
	      && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
	    {
	      REAL_VALUE_TYPE d;
	      jmp_buf handler;
	      int op1is2, op1ism1;

	      if (setjmp (handler))
		return 0;

	      set_float_handler (handler);
	      REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
	      op1is2 = REAL_VALUES_EQUAL (d, dconst2);
	      op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
	      set_float_handler (NULL_PTR);

	      /* x*2 is x+x and x*(-1) is -x */
	      if (op1is2 && GET_MODE (op0) == mode)
		return gen_rtx (PLUS, mode, op0, copy_rtx (op0));

	      else if (op1ism1 && GET_MODE (op0) == mode)
		return gen_rtx (NEG, mode, op0);
	    }
	  break;
	case IOR:
	  if (op1 == const0_rtx)
	    return op0;
	  if (GET_CODE (op1) == CONST_INT
	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
	    return op1;
	  if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
	    return op0;
	  /* A | (~A) -> -1 */
	  if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
	       || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
	      && ! side_effects_p (op0))
	    return constm1_rtx;
	  break;

	case XOR:
	  if (op1 == const0_rtx)
	    return op0;
	  if (GET_CODE (op1) == CONST_INT
	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
	    return gen_rtx (NOT, mode, op0);
	  if (op0 == op1 && ! side_effects_p (op0))
	    return const0_rtx;
	  break;

	case AND:
	  if (op1 == const0_rtx && ! side_effects_p (op0))
	    return const0_rtx;
	  if (GET_CODE (op1) == CONST_INT
	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
	    return op0;
	  if (op0 == op1 && ! side_effects_p (op0))
	    return op0;
	  /* A & (~A) -> 0 */
	  if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
	       || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
	      && ! side_effects_p (op0))
	    return const0_rtx;
	  break;
	case UDIV:
	  /* Convert divide by power of two into shift (divide by 1 handled
	     below).  */
	  if (GET_CODE (op1) == CONST_INT
	      && (arg1 = exact_log2 (INTVAL (op1))) > 0)
	    return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));

	  /* ... fall through ...  */

	case DIV:
	  if (op1 == CONST1_RTX (mode))
	    return op0;

	  /* In IEEE floating point, 0/x is not always 0.  */
	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
	       || GET_MODE_CLASS (mode) == MODE_INT)
	      && op0 == CONST0_RTX (mode)
	      && ! side_effects_p (op1))
	    return op0;

#if 0 /* Turned off till an expert says this is a safe thing to do.  */
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
	  /* Change division by a constant into multiplication.  */
	  else if (GET_CODE (op1) == CONST_DOUBLE
		   && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
		   && op1 != CONST0_RTX (mode))
	    {
	      REAL_VALUE_TYPE d;
	      REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
	      if (REAL_VALUES_EQUAL (d, dconst0))
		abort ();
#if defined (REAL_ARITHMETIC)
	      REAL_ARITHMETIC (d, (int) RDIV_EXPR, dconst1, d);
	      return gen_rtx (MULT, mode, op0,
			      CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
#else
	      return gen_rtx (MULT, mode, op0,
			      CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
#endif
	    }
#endif
#endif
	  break;

	case UMOD:
	  /* Handle modulus by power of two (mod with 1 handled below).  */
	  if (GET_CODE (op1) == CONST_INT
	      && exact_log2 (INTVAL (op1)) > 0)
	    return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));

	  /* ... fall through ...  */

	case MOD:
	  if ((op0 == const0_rtx || op1 == const1_rtx)
	      && ! side_effects_p (op0) && ! side_effects_p (op1))
	    return const0_rtx;
	  break;

	case ROTATERT:
	case ROTATE:
	  /* Rotating ~0 always results in ~0.  */
	  if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
	      && INTVAL (op0) == GET_MODE_MASK (mode)
	      && ! side_effects_p (op1))
	    return op0;

	  /* ... fall through ...  */

	case LSHIFT:
	case ASHIFT:
	case ASHIFTRT:
	case LSHIFTRT:
	  if (op1 == const0_rtx)
	    return op0;
	  if (op0 == const0_rtx && ! side_effects_p (op1))
	    return op0;
	  break;

	case SMIN:
	  if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
	      && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width -1)
	      && ! side_effects_p (op0))
	    return op1;
	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
	    return op0;
	  break;

	case SMAX:
	  if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
	      && (INTVAL (op1)
		  == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
	      && ! side_effects_p (op0))
	    return op1;
	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
	    return op0;
	  break;

	case UMIN:
	  if (op1 == const0_rtx && ! side_effects_p (op0))
	    return op1;
	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
	    return op0;
	  break;

	case UMAX:
	  if (op1 == constm1_rtx && ! side_effects_p (op0))
	    return op1;
	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
	    return op0;
	  break;

	default:
	  break;
	}

      return 0;
    }
  /* Get the integer argument values in two forms:
     zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S.  */

  arg0 = INTVAL (op0);
  arg1 = INTVAL (op1);

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
      arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;

      arg0s = arg0;
      if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
	arg0s |= ((HOST_WIDE_INT) (-1) << width);

      arg1s = arg1;
      if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
	arg1s |= ((HOST_WIDE_INT) (-1) << width);
    }
  else
    {
      arg0s = arg0;
      arg1s = arg1;
    }

  /* Compute the value of the arithmetic.  */

  switch (code)
    {
    case PLUS:
      val = arg0s + arg1s;
      break;

    case MINUS:
      val = arg0s - arg1s;
      break;

    case MULT:
      val = arg0s * arg1s;
      break;

    case DIV:
      if (arg1s == 0)
	return 0;
      val = arg0s / arg1s;
      break;

    case MOD:
      if (arg1s == 0)
	return 0;
      val = arg0s % arg1s;
      break;

    case UDIV:
      if (arg1 == 0)
	return 0;
      val = (unsigned HOST_WIDE_INT) arg0 / arg1;
      break;

    case UMOD:
      if (arg1 == 0)
	return 0;
      val = (unsigned HOST_WIDE_INT) arg0 % arg1;
      break;

    case AND:
      val = arg0 & arg1;
      break;

    case IOR:
      val = arg0 | arg1;
      break;

    case XOR:
      val = arg0 ^ arg1;
      break;

    case LSHIFTRT:
      /* If shift count is undefined, don't fold it; let the machine do
	 what it wants.  But truncate it if the machine will do that.  */
      if (arg1 < 0)
	return 0;

#ifdef SHIFT_COUNT_TRUNCATED
      arg1 &= (BITS_PER_WORD - 1);
#endif

      if (arg1 >= width)
	return 0;

      val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
      break;

    case ASHIFT:
    case LSHIFT:
      if (arg1 < 0)
	return 0;

#ifdef SHIFT_COUNT_TRUNCATED
      arg1 &= (BITS_PER_WORD - 1);
#endif

      if (arg1 >= width)
	return 0;

      val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
      break;

    case ASHIFTRT:
      if (arg1 < 0)
	return 0;

#ifdef SHIFT_COUNT_TRUNCATED
      arg1 &= (BITS_PER_WORD - 1);
#endif

      if (arg1 >= width)
	return 0;

      val = arg0s >> arg1;

      /* Bootstrap compiler may not have sign extended the right shift.
	 Manually extend the sign to insure bootstrap cc matches gcc.  */
      if (arg0s < 0 && arg1 > 0)
	val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);

      break;

    case ROTATERT:
      if (arg1 < 0)
	return 0;

      arg1 %= width;
      val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
	     | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
      break;

    case ROTATE:
      if (arg1 < 0)
	return 0;

      arg1 %= width;
      val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
	     | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
      break;

    case COMPARE:
      /* Do nothing here.  */
      return 0;

    case SMIN:
      val = arg0s <= arg1s ? arg0s : arg1s;
      break;

    case UMIN:
      val = ((unsigned HOST_WIDE_INT) arg0
	     <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
      break;

    case SMAX:
      val = arg0s > arg1s ? arg0s : arg1s;
      break;

    case UMAX:
      val = ((unsigned HOST_WIDE_INT) arg0
	     > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
      break;

    default:
      abort ();
    }

  /* Clear the bits that don't belong in our mode, unless they and our sign
     bit are all one.  So we get either a reasonable negative value or a
     reasonable unsigned value for this mode.  */
  if (width < HOST_BITS_PER_WIDE_INT
      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
	  != ((HOST_WIDE_INT) (-1) << (width - 1))))
    val &= ((HOST_WIDE_INT) 1 << width) - 1;

  return GEN_INT (val);
}
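
/* Usage sketch (not from the original source):
   (plus:SI (const_int 6) (const_int 7)) folds to (const_int 13);
   a zero return means no simplification was found.  */
#if 0
  rtx tem = simplify_binary_operation (PLUS, SImode,
				       GEN_INT (6), GEN_INT (7));
#endif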
/* Simplify a PLUS or MINUS, at least one of whose operands may be another
   PLUS or MINUS.

   Rather than test for specific cases, we do this by a brute-force method
   and do all possible simplifications until no more changes occur.  Then
   we rebuild the operation.  */
static rtx
simplify_plus_minus (code, mode, op0, op1)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
{
  rtx ops[8];
  int negs[8];
  rtx result, tem;
  int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
  int first = 1, negate = 0, changed;
  int i, j;

  bzero (ops, sizeof ops);

  /* Set up the two operands and then expand them until nothing has been
     changed.  If we run out of room in our array, give up; this should
     almost never happen.  */

  ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);

  changed = 1;
  while (changed)
    {
      changed = 0;

      for (i = 0; i < n_ops; i++)
	switch (GET_CODE (ops[i]))
	  {
	  case PLUS:
	  case MINUS:
	    if (n_ops == 7)
	      return 0;

	    ops[n_ops] = XEXP (ops[i], 1);
	    negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
	    ops[i] = XEXP (ops[i], 0);
	    input_ops++;
	    changed = 1;
	    break;

	  case NEG:
	    ops[i] = XEXP (ops[i], 0);
	    negs[i] = ! negs[i];
	    changed = 1;
	    break;

	  case CONST:
	    ops[i] = XEXP (ops[i], 0);
	    input_consts++;
	    changed = 1;
	    break;

	  case NOT:
	    /* ~a -> (-a - 1) */
	    if (n_ops != 7)
	      {
		ops[n_ops] = constm1_rtx;
		negs[n_ops++] = negs[i];
		ops[i] = XEXP (ops[i], 0);
		negs[i] = ! negs[i];
		changed = 1;
	      }
	    break;

	  case CONST_INT:
	    if (negs[i])
	      ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
	    break;
	  }
    }

  /* If we only have two operands, we can't do anything.  */
  if (n_ops <= 2)
    return 0;

  /* Now simplify each pair of operands until nothing changes.  The first
     time through just simplify constants against each other.  */

  changed = 1;
  while (changed)
    {
      changed = first;

      for (i = 0; i < n_ops - 1; i++)
	for (j = i + 1; j < n_ops; j++)
	  if (ops[i] != 0 && ops[j] != 0
	      && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
	    {
	      rtx lhs = ops[i], rhs = ops[j];
	      enum rtx_code ncode = PLUS;

	      if (negs[i] && ! negs[j])
		lhs = ops[j], rhs = ops[i], ncode = MINUS;
	      else if (! negs[i] && negs[j])
		ncode = MINUS;

	      tem = simplify_binary_operation (ncode, mode, lhs, rhs);
	      if (tem)
		{
		  ops[i] = tem, ops[j] = 0;
		  negs[i] = negs[i] && negs[j];
		  if (GET_CODE (tem) == NEG)
		    ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];

		  if (GET_CODE (ops[i]) == CONST_INT && negs[i])
		    ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;

		  changed = 1;
		}
	    }

      first = 0;
    }

  /* Pack all the operands to the lower-numbered entries and give up if
     we didn't reduce the number of operands we had.  Make sure we
     count a CONST as two operands.  If we have the same number of
     operands, but have made more CONSTs than we had, this is also
     an improvement, so accept it.  */

  for (i = 0, j = 0; j < n_ops; j++)
    if (ops[j] != 0)
      {
	ops[i] = ops[j], negs[i++] = negs[j];
	if (GET_CODE (ops[j]) == CONST)
	  n_consts++;
      }

  if (i + n_consts > input_ops
      || (i + n_consts == input_ops && n_consts <= input_consts))
    return 0;

  n_ops = i;

  /* If we have a CONST_INT, put it last.  */
  for (i = 0; i < n_ops - 1; i++)
    if (GET_CODE (ops[i]) == CONST_INT)
      {
	tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i], ops[i] = tem;
	j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
      }

  /* Put a non-negated operand first.  If there aren't any, make all
     operands positive and negate the whole thing later.  */
  for (i = 0; i < n_ops && negs[i]; i++)
    ;

  if (i == n_ops)
    {
      negate = 1;
      for (i = 0; i < n_ops; i++)
	negs[i] = 0;
    }
  else if (i != 0)
    {
      tem = ops[0], ops[0] = ops[i], ops[i] = tem;
      j = negs[0], negs[0] = negs[i], negs[i] = j;
    }

  /* Now make the result by performing the requested operations.  */
  result = ops[0];
  for (i = 1; i < n_ops; i++)
    result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);

  return negate ? gen_rtx (NEG, mode, result) : result;
}
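
/* Worked example (sketch with hypothetical operands A, B, C, not from
   the original source): for (minus (plus A B) (plus A C)) the expansion
   step produces the operand list { A, B, -A, -C }; A cancels against -A
   and the rebuild step emits (minus B C).  */
#if 0
  rtx a, b, c;	/* hypothetical operands */
  rtx tem = simplify_plus_minus (MINUS, SImode,
				 gen_rtx (PLUS, SImode, a, b),
				 gen_rtx (PLUS, SImode, a, c));
  /* TEM is (minus B C).  */
#endif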
/* Make a binary operation by properly ordering the operands and
   seeing if the expression folds.  */
static rtx
cse_gen_binary (code, mode, op0, op1)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
{
  rtx tem;

  /* Put complex operands first and constants second if commutative.  */
  if (GET_RTX_CLASS (code) == 'c'
      && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
	  || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
	  || (GET_CODE (op0) == SUBREG
	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
    tem = op0, op0 = op1, op1 = tem;

  /* If this simplifies, do it.  */
  tem = simplify_binary_operation (code, mode, op0, op1);

  if (tem)
    return tem;

  /* Handle addition and subtraction of CONST_INT specially.  Otherwise,
     just form the operation.  */

  if (code == PLUS && GET_CODE (op1) == CONST_INT
      && GET_MODE (op0) != VOIDmode)
    return plus_constant (op0, INTVAL (op1));
  else if (code == MINUS && GET_CODE (op1) == CONST_INT
	   && GET_MODE (op0) != VOIDmode)
    return plus_constant (op0, - INTVAL (op1));
  else
    return gen_rtx (code, mode, op0, op1);
}
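
/* Usage sketch (not from the original source; REG0 is a hypothetical
   register operand).  Commutative operands are reordered, so both calls
   produce (plus:SI REG0 (const_int 4)):  */
#if 0
  rtx r1 = cse_gen_binary (PLUS, SImode, reg0, GEN_INT (4));
  rtx r2 = cse_gen_binary (PLUS, SImode, GEN_INT (4), reg0);
#endif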
/* Like simplify_binary_operation except used for relational operators.
   MODE is the mode of the operands, not that of the result.  */
rtx
simplify_relational_operation (code, mode, op0, op1)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
{
  register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
  HOST_WIDE_INT val;
  int width = GET_MODE_BITSIZE (mode);

  /* If op0 is a compare, extract the comparison arguments from it.  */
  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);

  /* What to do with CCmode isn't clear yet.
     Let's make sure nothing erroneous is done.  */
  if (GET_MODE (op0) == CCmode)
    return 0;

  /* Unlike the arithmetic operations, we can do the comparison whether
     or not WIDTH is larger than HOST_BITS_PER_WIDE_INT because the
     CONST_INTs are to be understood as being infinite precision as
     is the comparison.  So there is no question of overflow.  */

  if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT || width == 0)
    {
      /* Even if we can't compute a constant result,
	 there are some cases worth simplifying.  */

      /* For non-IEEE floating-point, if the two operands are equal, we know
	 the result.  */
      if (rtx_equal_p (op0, op1)
	  && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
	      || GET_MODE_CLASS (GET_MODE (op0)) != MODE_FLOAT))
	return (code == EQ || code == GE || code == LE || code == LEU
		|| code == GEU) ? const_true_rtx : const0_rtx;

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      else if (GET_CODE (op0) == CONST_DOUBLE
	       && GET_CODE (op1) == CONST_DOUBLE
	       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d0, d1;
	  jmp_buf handler;
	  int op0lt, op1lt, equal;

	  if (setjmp (handler))
	    return 0;

	  set_float_handler (handler);
	  REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
	  REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
	  equal = REAL_VALUES_EQUAL (d0, d1);
	  op0lt = REAL_VALUES_LESS (d0, d1);
	  op1lt = REAL_VALUES_LESS (d1, d0);
	  set_float_handler (NULL_PTR);

	  switch (code)
	    {
	    case EQ:
	      return equal ? const_true_rtx : const0_rtx;
	    case NE:
	      return !equal ? const_true_rtx : const0_rtx;
	    case LE:
	      return equal || op0lt ? const_true_rtx : const0_rtx;
	    case LT:
	      return op0lt ? const_true_rtx : const0_rtx;
	    case GE:
	      return equal || op1lt ? const_true_rtx : const0_rtx;
	    case GT:
	      return op1lt ? const_true_rtx : const0_rtx;
	    }
	}
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      else if (GET_MODE_CLASS (mode) == MODE_INT
	       && width > HOST_BITS_PER_WIDE_INT
	       && (GET_CODE (op0) == CONST_DOUBLE
		   || GET_CODE (op0) == CONST_INT)
	       && (GET_CODE (op1) == CONST_DOUBLE
		   || GET_CODE (op1) == CONST_INT))
	{
	  HOST_WIDE_INT h0, l0, h1, l1;
	  unsigned HOST_WIDE_INT uh0, ul0, uh1, ul1;
	  int op0lt, op0ltu, equal;

	  if (GET_CODE (op0) == CONST_DOUBLE)
	    l0 = CONST_DOUBLE_LOW (op0), h0 = CONST_DOUBLE_HIGH (op0);
	  else
	    l0 = INTVAL (op0), h0 = l0 < 0 ? -1 : 0;

	  if (GET_CODE (op1) == CONST_DOUBLE)
	    l1 = CONST_DOUBLE_LOW (op1), h1 = CONST_DOUBLE_HIGH (op1);
	  else
	    l1 = INTVAL (op1), h1 = l1 < 0 ? -1 : 0;

	  uh0 = h0, ul0 = l0, uh1 = h1, ul1 = l1;

	  equal = (h0 == h1 && l0 == l1);
	  op0lt = (h0 < h1 || (h0 == h1 && l0 < l1));
	  op0ltu = (uh0 < uh1 || (uh0 == uh1 && ul0 < ul1));

	  switch (code)
	    {
	    case EQ:
	      return equal ? const_true_rtx : const0_rtx;
	    case NE:
	      return !equal ? const_true_rtx : const0_rtx;
	    case LE:
	      return equal || op0lt ? const_true_rtx : const0_rtx;
	    case LT:
	      return op0lt ? const_true_rtx : const0_rtx;
	    case GE:
	      return !op0lt ? const_true_rtx : const0_rtx;
	    case GT:
	      return !equal && !op0lt ? const_true_rtx : const0_rtx;
	    case LEU:
	      return equal || op0ltu ? const_true_rtx : const0_rtx;
	    case LTU:
	      return op0ltu ? const_true_rtx : const0_rtx;
	    case GEU:
	      return !op0ltu ? const_true_rtx : const0_rtx;
	    case GTU:
	      return !equal && !op0ltu ? const_true_rtx : const0_rtx;
	    }
	}
      switch (code)
	{
	case EQ:
#if 0
	  /* We can't make this assumption due to #pragma weak */
	  if (CONSTANT_P (op0) && op1 == const0_rtx)
	    return const0_rtx;
#endif
	  if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
	      /* On some machines, the ap reg can be 0 sometimes.  */
	      && op0 != arg_pointer_rtx)
	    return const0_rtx;
	  break;

	case NE:
#if 0
	  /* We can't make this assumption due to #pragma weak */
	  if (CONSTANT_P (op0) && op1 == const0_rtx)
	    return const_true_rtx;
#endif
	  if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
	      /* On some machines, the ap reg can be 0 sometimes.  */
	      && op0 != arg_pointer_rtx)
	    return const_true_rtx;
	  break;

	case GEU:
	  /* Unsigned values are never negative, but we must be sure we are
	     actually comparing a value, not a CC operand.  */
	  if (op1 == const0_rtx
	      && GET_MODE_CLASS (mode) == MODE_INT)
	    return const_true_rtx;
	  break;

	case LTU:
	  if (op1 == const0_rtx
	      && GET_MODE_CLASS (mode) == MODE_INT)
	    return const0_rtx;
	  break;

	case LEU:
	  /* Unsigned values are never greater than the largest
	     unsigned value.  */
	  if (GET_CODE (op1) == CONST_INT
	      && INTVAL (op1) == GET_MODE_MASK (mode)
	      && GET_MODE_CLASS (mode) == MODE_INT)
	    return const_true_rtx;
	  break;

	case GTU:
	  if (GET_CODE (op1) == CONST_INT
	      && INTVAL (op1) == GET_MODE_MASK (mode)
	      && GET_MODE_CLASS (mode) == MODE_INT)
	    return const0_rtx;
	  break;
	}

      return 0;
    }
  /* Get the integer argument values in two forms:
     zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S.  */

  arg0 = INTVAL (op0);
  arg1 = INTVAL (op1);

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
      arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;

      arg0s = arg0;
      if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
	arg0s |= ((HOST_WIDE_INT) (-1) << width);

      arg1s = arg1;
      if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
	arg1s |= ((HOST_WIDE_INT) (-1) << width);
    }
  else
    {
      arg0s = arg0;
      arg1s = arg1;
    }

  /* Compute the value of the arithmetic.  */

  switch (code)
    {
    case NE:
      val = arg0 != arg1 ? STORE_FLAG_VALUE : 0;
      break;

    case EQ:
      val = arg0 == arg1 ? STORE_FLAG_VALUE : 0;
      break;

    case LE:
      val = arg0s <= arg1s ? STORE_FLAG_VALUE : 0;
      break;

    case LT:
      val = arg0s < arg1s ? STORE_FLAG_VALUE : 0;
      break;

    case GE:
      val = arg0s >= arg1s ? STORE_FLAG_VALUE : 0;
      break;

    case GT:
      val = arg0s > arg1s ? STORE_FLAG_VALUE : 0;
      break;

    case LEU:
      val = (((unsigned HOST_WIDE_INT) arg0)
	     <= ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
      break;

    case LTU:
      val = (((unsigned HOST_WIDE_INT) arg0)
	     < ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
      break;

    case GEU:
      val = (((unsigned HOST_WIDE_INT) arg0)
	     >= ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
      break;

    case GTU:
      val = (((unsigned HOST_WIDE_INT) arg0)
	     > ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
      break;

    default:
      abort ();
    }

  /* Clear the bits that don't belong in our mode, unless they and our sign
     bit are all one.  So we get either a reasonable negative value or a
     reasonable unsigned value for this mode.  */
  if (width < HOST_BITS_PER_WIDE_INT
      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
	  != ((HOST_WIDE_INT) (-1) << (width - 1))))
    val &= ((HOST_WIDE_INT) 1 << width) - 1;

  return GEN_INT (val);
}
4428 /* Simplify CODE, an operation with result mode MODE and three operands,
4429 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4430 a constant. Return 0 if no simplifications is possible. */
4433 simplify_ternary_operation (code
, mode
, op0_mode
, op0
, op1
, op2
)
4435 enum machine_mode mode
, op0_mode
;
4438 int width
= GET_MODE_BITSIZE (mode
);
4440 /* VOIDmode means "infinite" precision. */
4442 width
= HOST_BITS_PER_WIDE_INT
;
4448 if (GET_CODE (op0
) == CONST_INT
4449 && GET_CODE (op1
) == CONST_INT
4450 && GET_CODE (op2
) == CONST_INT
4451 && INTVAL (op1
) + INTVAL (op2
) <= GET_MODE_BITSIZE (op0_mode
)
4452 && width
<= HOST_BITS_PER_WIDE_INT
)
4454 /* Extracting a bit-field from a constant */
4455 HOST_WIDE_INT val
= INTVAL (op0
);
4458 val
>>= (GET_MODE_BITSIZE (op0_mode
) - INTVAL (op2
) - INTVAL (op1
));
4460 val
>>= INTVAL (op2
);
4462 if (HOST_BITS_PER_WIDE_INT
!= INTVAL (op1
))
4464 /* First zero-extend. */
4465 val
&= ((HOST_WIDE_INT
) 1 << INTVAL (op1
)) - 1;
4466 /* If desired, propagate sign bit. */
4467 if (code
== SIGN_EXTRACT
4468 && (val
& ((HOST_WIDE_INT
) 1 << (INTVAL (op1
) - 1))))
4469 val
|= ~ (((HOST_WIDE_INT
) 1 << INTVAL (op1
)) - 1);
4472 /* Clear the bits that don't belong in our mode,
4473 unless they and our sign bit are all one.
4474 So we get either a reasonable negative value or a reasonable
4475 unsigned value for this mode. */
4476 if (width
< HOST_BITS_PER_WIDE_INT
4477 && ((val
& ((HOST_WIDE_INT
) (-1) << (width
- 1)))
4478 != ((HOST_WIDE_INT
) (-1) << (width
- 1))))
4479 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
4481 return GEN_INT (val
);
4486 if (GET_CODE (op0
) == CONST_INT
)
4487 return op0
!= const0_rtx
? op1
: op2
;
4497 /* If X is a nontrivial arithmetic operation on an argument
4498 for which a constant value can be determined, return
4499 the result of operating on that value, as a constant.
4500 Otherwise, return X, possibly with one or more operands
4501 modified by recursive calls to this function.
4503 If X is a register whose contents are known, we do NOT
4504 return those contents here. equiv_constant is called to
4507 INSN is the insn that we may be modifying. If it is 0, make a copy
4508 of X before modifying it. */
4515 register enum rtx_code code
;
4516 register enum machine_mode mode
;
4523 /* Folded equivalents of first two operands of X. */
4527 /* Constant equivalents of first three operands of X;
4528 0 when no such equivalent is known. */
4533 /* The mode of the first operand of X. We need this for sign and zero
4535 enum machine_mode mode_arg0
;
4540 mode
= GET_MODE (x
);
4541 code
= GET_CODE (x
);
4550 /* No use simplifying an EXPR_LIST
4551 since they are used only for lists of args
4552 in a function call's REG_EQUAL note. */
4558 return prev_insn_cc0
;
4562 /* If the next insn is a CODE_LABEL followed by a jump table,
4563 PC's value is a LABEL_REF pointing to that label. That
4564 lets us fold switch statements on the Vax. */
4565 if (insn
&& GET_CODE (insn
) == JUMP_INSN
)
4567 rtx next
= next_nonnote_insn (insn
);
4569 if (next
&& GET_CODE (next
) == CODE_LABEL
4570 && NEXT_INSN (next
) != 0
4571 && GET_CODE (NEXT_INSN (next
)) == JUMP_INSN
4572 && (GET_CODE (PATTERN (NEXT_INSN (next
))) == ADDR_VEC
4573 || GET_CODE (PATTERN (NEXT_INSN (next
))) == ADDR_DIFF_VEC
))
4574 return gen_rtx (LABEL_REF
, Pmode
, next
);
4579 /* See if we previously assigned a constant value to this SUBREG. */
4580 if ((new = lookup_as_function (x
, CONST_INT
)) != 0
4581 || (new = lookup_as_function (x
, CONST_DOUBLE
)) != 0)
4584 /* If this is a paradoxical SUBREG, we have no idea what value the
4585 extra bits would have. However, if the operand is equivalent
4586 to a SUBREG whose operand is the same as our mode, and all the
4587 modes are within a word, we can just use the inner operand
4588 because these SUBREGs just say how to treat the register. */
4590 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
4592 enum machine_mode imode
= GET_MODE (SUBREG_REG (x
));
4593 struct table_elt
*elt
;
4595 if (GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
4596 && GET_MODE_SIZE (imode
) <= UNITS_PER_WORD
4597 && (elt
= lookup (SUBREG_REG (x
), HASH (SUBREG_REG (x
), imode
),
4600 for (elt
= elt
->first_same_value
;
4601 elt
; elt
= elt
->next_same_value
)
4602 if (GET_CODE (elt
->exp
) == SUBREG
4603 && GET_MODE (SUBREG_REG (elt
->exp
)) == mode
4604 && exp_equiv_p (elt
->exp
, elt
->exp
, 1, 0))
4605 return copy_rtx (SUBREG_REG (elt
->exp
));
4611 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4612 We might be able to if the SUBREG is extracting a single word in an
4613 integral mode or extracting the low part. */
4615 folded_arg0
= fold_rtx (SUBREG_REG (x
), insn
);
4616 const_arg0
= equiv_constant (folded_arg0
);
4618 folded_arg0
= const_arg0
;
4620 if (folded_arg0
!= SUBREG_REG (x
))
4624 if (GET_MODE_CLASS (mode
) == MODE_INT
4625 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
4626 && GET_MODE (SUBREG_REG (x
)) != VOIDmode
)
4627 new = operand_subword (folded_arg0
, SUBREG_WORD (x
), 0,
4628 GET_MODE (SUBREG_REG (x
)));
4629 if (new == 0 && subreg_lowpart_p (x
))
4630 new = gen_lowpart_if_possible (mode
, folded_arg0
);
4635 /* If this is a narrowing SUBREG and our operand is a REG, see if
4636 we can find an equivalence for REG that is an arithmetic operation
4637 in a wider mode where both operands are paradoxical SUBREGs
4638 from objects of our result mode. In that case, we couldn't report
4639 an equivalent value for that operation, since we don't know what the
4640 extra bits will be. But we can find an equivalence for this SUBREG
4641 by folding that operation is the narrow mode. This allows us to
4642 fold arithmetic in narrow modes when the machine only supports
4643 word-sized arithmetic.
4645 Also look for a case where we have a SUBREG whose operand is the
4646 same as our result. If both modes are smaller than a word, we
4647 are simply interpreting a register in different modes and we
4648 can use the inner value. */
4650 if (GET_CODE (folded_arg0
) == REG
4651 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (folded_arg0
))
4652 && subreg_lowpart_p (x
))
4654 struct table_elt
*elt
;
4656 /* We can use HASH here since we know that canon_hash won't be
4658 elt
= lookup (folded_arg0
,
4659 HASH (folded_arg0
, GET_MODE (folded_arg0
)),
4660 GET_MODE (folded_arg0
));
4663 elt
= elt
->first_same_value
;
4665 for (; elt
; elt
= elt
->next_same_value
)
4667 enum rtx_code eltcode
= GET_CODE (elt
->exp
);
4669 /* Just check for unary and binary operations. */
4670 if (GET_RTX_CLASS (GET_CODE (elt
->exp
)) == '1'
4671 && GET_CODE (elt
->exp
) != SIGN_EXTEND
4672 && GET_CODE (elt
->exp
) != ZERO_EXTEND
4673 && GET_CODE (XEXP (elt
->exp
, 0)) == SUBREG
4674 && GET_MODE (SUBREG_REG (XEXP (elt
->exp
, 0))) == mode
)
4676 rtx op0
= SUBREG_REG (XEXP (elt
->exp
, 0));
4678 if (GET_CODE (op0
) != REG
&& ! CONSTANT_P (op0
))
4679 op0
= fold_rtx (op0
, NULL_RTX
);
4681 op0
= equiv_constant (op0
);
4683 new = simplify_unary_operation (GET_CODE (elt
->exp
), mode
,
4686 else if ((GET_RTX_CLASS (GET_CODE (elt
->exp
)) == '2'
4687 || GET_RTX_CLASS (GET_CODE (elt
->exp
)) == 'c')
4688 && eltcode
!= DIV
&& eltcode
!= MOD
4689 && eltcode
!= UDIV
&& eltcode
!= UMOD
4690 && eltcode
!= ASHIFTRT
&& eltcode
!= LSHIFTRT
4691 && eltcode
!= ROTATE
&& eltcode
!= ROTATERT
4692 && ((GET_CODE (XEXP (elt
->exp
, 0)) == SUBREG
4693 && (GET_MODE (SUBREG_REG (XEXP (elt
->exp
, 0)))
4695 || CONSTANT_P (XEXP (elt
->exp
, 0)))
4696 && ((GET_CODE (XEXP (elt
->exp
, 1)) == SUBREG
4697 && (GET_MODE (SUBREG_REG (XEXP (elt
->exp
, 1)))
4699 || CONSTANT_P (XEXP (elt
->exp
, 1))))
4701 rtx op0
= gen_lowpart_common (mode
, XEXP (elt
->exp
, 0));
4702 rtx op1
= gen_lowpart_common (mode
, XEXP (elt
->exp
, 1));
4704 if (op0
&& GET_CODE (op0
) != REG
&& ! CONSTANT_P (op0
))
4705 op0
= fold_rtx (op0
, NULL_RTX
);
4708 op0
= equiv_constant (op0
);
4710 if (op1
&& GET_CODE (op1
) != REG
&& ! CONSTANT_P (op1
))
4711 op1
= fold_rtx (op1
, NULL_RTX
);
4714 op1
= equiv_constant (op1
);
4717 new = simplify_binary_operation (GET_CODE (elt
->exp
), mode
,
4721 else if (GET_CODE (elt
->exp
) == SUBREG
4722 && GET_MODE (SUBREG_REG (elt
->exp
)) == mode
4723 && (GET_MODE_SIZE (GET_MODE (folded_arg0
))
4725 && exp_equiv_p (elt
->exp
, elt
->exp
, 1, 0))
4726 new = copy_rtx (SUBREG_REG (elt
->exp
));
4737 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4738 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4739 new = lookup_as_function (XEXP (x
, 0), code
);
4741 return fold_rtx (copy_rtx (XEXP (new, 0)), insn
);
4745 /* If we are not actually processing an insn, don't try to find the
4746 best address. Not only don't we care, but we could modify the
4747 MEM in an invalid way since we have no insn to validate against. */
4749 find_best_addr (insn
, &XEXP (x
, 0));
4752 /* Even if we don't fold in the insn itself,
4753 we can safely do so here, in hopes of getting a constant. */
4754 rtx addr
= fold_rtx (XEXP (x
, 0), NULL_RTX
);
4756 HOST_WIDE_INT offset
= 0;
4758 if (GET_CODE (addr
) == REG
4759 && REGNO_QTY_VALID_P (REGNO (addr
))
4760 && GET_MODE (addr
) == qty_mode
[reg_qty
[REGNO (addr
)]]
4761 && qty_const
[reg_qty
[REGNO (addr
)]] != 0)
4762 addr
= qty_const
[reg_qty
[REGNO (addr
)]];
4764 /* If address is constant, split it into a base and integer offset. */
4765 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
4767 else if (GET_CODE (addr
) == CONST
&& GET_CODE (XEXP (addr
, 0)) == PLUS
4768 && GET_CODE (XEXP (XEXP (addr
, 0), 1)) == CONST_INT
)
4770 base
= XEXP (XEXP (addr
, 0), 0);
4771 offset
= INTVAL (XEXP (XEXP (addr
, 0), 1));
4773 else if (GET_CODE (addr
) == LO_SUM
4774 && GET_CODE (XEXP (addr
, 1)) == SYMBOL_REF
)
4775 base
= XEXP (addr
, 1);
4777 /* If this is a constant pool reference, we can fold it into its
4778 constant to allow better value tracking. */
4779 if (base
&& GET_CODE (base
) == SYMBOL_REF
4780 && CONSTANT_POOL_ADDRESS_P (base
))
4782 rtx constant
= get_pool_constant (base
);
4783 enum machine_mode const_mode
= get_pool_mode (base
);
4786 if (CONSTANT_P (constant
) && GET_CODE (constant
) != CONST_INT
)
4787 constant_pool_entries_cost
= COST (constant
);
4789 /* If we are loading the full constant, we have an equivalence. */
4790 if (offset
== 0 && mode
== const_mode
)
4793 /* If this actually isn't a constant (wierd!), we can't do
4794 anything. Otherwise, handle the two most common cases:
4795 extracting a word from a multi-word constant, and extracting
4796 the low-order bits. Other cases don't seem common enough to
4798 if (! CONSTANT_P (constant
))
4801 if (GET_MODE_CLASS (mode
) == MODE_INT
4802 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
4803 && offset
% UNITS_PER_WORD
== 0
4804 && (new = operand_subword (constant
,
4805 offset
/ UNITS_PER_WORD
,
4806 0, const_mode
)) != 0)
4809 if (((BYTES_BIG_ENDIAN
4810 && offset
== GET_MODE_SIZE (GET_MODE (constant
)) - 1)
4811 || (! BYTES_BIG_ENDIAN
&& offset
== 0))
4812 && (new = gen_lowpart_if_possible (mode
, constant
)) != 0)
4816 /* If this is a reference to a label at a known position in a jump
4817 table, we also know its value. */
4818 if (base
&& GET_CODE (base
) == LABEL_REF
)
4820 rtx label
= XEXP (base
, 0);
4821 rtx table_insn
= NEXT_INSN (label
);
4823 if (table_insn
&& GET_CODE (table_insn
) == JUMP_INSN
4824 && GET_CODE (PATTERN (table_insn
)) == ADDR_VEC
)
4826 rtx table
= PATTERN (table_insn
);
4829 && (offset
/ GET_MODE_SIZE (GET_MODE (table
))
4830 < XVECLEN (table
, 0)))
4831 return XVECEXP (table
, 0,
4832 offset
/ GET_MODE_SIZE (GET_MODE (table
)));
4834 if (table_insn
&& GET_CODE (table_insn
) == JUMP_INSN
4835 && GET_CODE (PATTERN (table_insn
)) == ADDR_DIFF_VEC
)
4837 rtx table
= PATTERN (table_insn
);
4840 && (offset
/ GET_MODE_SIZE (GET_MODE (table
))
4841 < XVECLEN (table
, 1)))
4843 offset
/= GET_MODE_SIZE (GET_MODE (table
));
4844 new = gen_rtx (MINUS
, Pmode
, XVECEXP (table
, 1, offset
),
4847 if (GET_MODE (table
) != Pmode
)
4848 new = gen_rtx (TRUNCATE
, GET_MODE (table
), new);
4862 mode_arg0
= VOIDmode
;
4864 /* Try folding our operands.
4865 Then see which ones have constant values known. */
4867 fmt
= GET_RTX_FORMAT (code
);
4868 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4871 rtx arg
= XEXP (x
, i
);
4872 rtx folded_arg
= arg
, const_arg
= 0;
4873 enum machine_mode mode_arg
= GET_MODE (arg
);
4874 rtx cheap_arg
, expensive_arg
;
4875 rtx replacements
[2];
4878 /* Most arguments are cheap, so handle them specially. */
4879 switch (GET_CODE (arg
))
4882 /* This is the same as calling equiv_constant; it is duplicated
4884 if (REGNO_QTY_VALID_P (REGNO (arg
))
4885 && qty_const
[reg_qty
[REGNO (arg
)]] != 0
4886 && GET_CODE (qty_const
[reg_qty
[REGNO (arg
)]]) != REG
4887 && GET_CODE (qty_const
[reg_qty
[REGNO (arg
)]]) != PLUS
)
4889 = gen_lowpart_if_possible (GET_MODE (arg
),
4890 qty_const
[reg_qty
[REGNO (arg
)]]);
4903 folded_arg
= prev_insn_cc0
;
4904 mode_arg
= prev_insn_cc0_mode
;
4905 const_arg
= equiv_constant (folded_arg
);
4910 folded_arg
= fold_rtx (arg
, insn
);
4911 const_arg
= equiv_constant (folded_arg
);
4914 /* For the first three operands, see if the operand
4915 is constant or equivalent to a constant. */
4919 folded_arg0
= folded_arg
;
4920 const_arg0
= const_arg
;
4921 mode_arg0
= mode_arg
;
4924 folded_arg1
= folded_arg
;
4925 const_arg1
= const_arg
;
4928 const_arg2
= const_arg
;
4932 /* Pick the least expensive of the folded argument and an
4933 equivalent constant argument. */
4934 if (const_arg
== 0 || const_arg
== folded_arg
4935 || COST (const_arg
) > COST (folded_arg
))
4936 cheap_arg
= folded_arg
, expensive_arg
= const_arg
;
4938 cheap_arg
= const_arg
, expensive_arg
= folded_arg
;
4940 /* Try to replace the operand with the cheapest of the two
4941 possibilities. If it doesn't work and this is either of the first
4942 two operands of a commutative operation, try swapping them.
4943 If THAT fails, try the more expensive, provided it is cheaper
4944 than what is already there. */
4946 if (cheap_arg
== XEXP (x
, i
))
4949 if (insn
== 0 && ! copied
)
4955 replacements
[0] = cheap_arg
, replacements
[1] = expensive_arg
;
4957 j
< 2 && replacements
[j
]
4958 && COST (replacements
[j
]) < COST (XEXP (x
, i
));
4961 if (validate_change (insn
, &XEXP (x
, i
), replacements
[j
], 0))
4964 if (code
== NE
|| code
== EQ
|| GET_RTX_CLASS (code
) == 'c')
4966 validate_change (insn
, &XEXP (x
, i
), XEXP (x
, 1 - i
), 1);
4967 validate_change (insn
, &XEXP (x
, 1 - i
), replacements
[j
], 1);
4969 if (apply_change_group ())
4971 /* Swap them back to be invalid so that this loop can
4972 continue and flag them to be swapped back later. */
4975 tem
= XEXP (x
, 0); XEXP (x
, 0) = XEXP (x
, 1);
4984 else if (fmt
[i
] == 'E')
4985 /* Don't try to fold inside of a vector of expressions.
4986 Doing nothing is harmless. */
4989 /* If a commutative operation, place a constant integer as the second
4990 operand unless the first operand is also a constant integer. Otherwise,
4991 place any constant second unless the first operand is also a constant. */
4993 if (code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
4995 if (must_swap
|| (const_arg0
4997 || (GET_CODE (const_arg0
) == CONST_INT
4998 && GET_CODE (const_arg1
) != CONST_INT
))))
5000 register rtx tem
= XEXP (x
, 0);
5002 if (insn
== 0 && ! copied
)
5008 validate_change (insn
, &XEXP (x
, 0), XEXP (x
, 1), 1);
5009 validate_change (insn
, &XEXP (x
, 1), tem
, 1);
5010 if (apply_change_group ())
5012 tem
= const_arg0
, const_arg0
= const_arg1
, const_arg1
= tem
;
5013 tem
= folded_arg0
, folded_arg0
= folded_arg1
, folded_arg1
= tem
;
5018 /* If X is an arithmetic operation, see if we can simplify it. */
5020 switch (GET_RTX_CLASS (code
))
5023 /* We can't simplify extension ops unless we know the original mode. */
5024 if ((code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5025 && mode_arg0
== VOIDmode
)
5027 new = simplify_unary_operation (code
, mode
,
5028 const_arg0
? const_arg0
: folded_arg0
,
5033 /* See what items are actually being compared and set FOLDED_ARG[01]
5034 to those values and CODE to the actual comparison code. If any are
5035 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5036 do anything if both operands are already known to be constant. */
5038 if (const_arg0
== 0 || const_arg1
== 0)
5040 struct table_elt
*p0
, *p1
;
5041 rtx
true = const_true_rtx
, false = const0_rtx
;
5042 enum machine_mode mode_arg1
;
5044 #ifdef FLOAT_STORE_FLAG_VALUE
5045 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
5047 true = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE
, mode
);
5048 false = CONST0_RTX (mode
);
5052 code
= find_comparison_args (code
, &folded_arg0
, &folded_arg1
,
5053 &mode_arg0
, &mode_arg1
);
5054 const_arg0
= equiv_constant (folded_arg0
);
5055 const_arg1
= equiv_constant (folded_arg1
);
5057 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5058 what kinds of things are being compared, so we can't do
5059 anything with this comparison. */
5061 if (mode_arg0
== VOIDmode
|| GET_MODE_CLASS (mode_arg0
) == MODE_CC
)
5064 /* If we do not now have two constants being compared, see if we
5065 can nevertheless deduce some things about the comparison. */
5066 if (const_arg0
== 0 || const_arg1
== 0)
5068 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or non-explicit
5069 constant? These aren't zero, but we don't know their sign. */
5070 if (const_arg1
== const0_rtx
5071 && (NONZERO_BASE_PLUS_P (folded_arg0
)
5072 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5074 || GET_CODE (folded_arg0
) == SYMBOL_REF
5076 || GET_CODE (folded_arg0
) == LABEL_REF
5077 || GET_CODE (folded_arg0
) == CONST
))
5081 else if (code
== NE
)
5085 /* See if the two operands are the same. We don't do this
5086 for IEEE floating-point since we can't assume x == x
5087 since x might be a NaN. */
5089 if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
5090 || GET_MODE_CLASS (mode_arg0
) != MODE_FLOAT
)
5091 && (folded_arg0
== folded_arg1
5092 || (GET_CODE (folded_arg0
) == REG
5093 && GET_CODE (folded_arg1
) == REG
5094 && (reg_qty
[REGNO (folded_arg0
)]
5095 == reg_qty
[REGNO (folded_arg1
)]))
5096 || ((p0
= lookup (folded_arg0
,
5097 (safe_hash (folded_arg0
, mode_arg0
)
5098 % NBUCKETS
), mode_arg0
))
5099 && (p1
= lookup (folded_arg1
,
5100 (safe_hash (folded_arg1
, mode_arg0
)
5101 % NBUCKETS
), mode_arg0
))
5102 && p0
->first_same_value
== p1
->first_same_value
)))
5103 return ((code
== EQ
|| code
== LE
|| code
== GE
5104 || code
== LEU
|| code
== GEU
)
5107 /* If FOLDED_ARG0 is a register, see if the comparison we are
5108 doing now is either the same as we did before or the reverse
5109 (we only check the reverse if not floating-point). */
5110 else if (GET_CODE (folded_arg0
) == REG
)
5112 int qty
= reg_qty
[REGNO (folded_arg0
)];
5114 if (REGNO_QTY_VALID_P (REGNO (folded_arg0
))
5115 && (comparison_dominates_p (qty_comparison_code
[qty
], code
)
5116 || (comparison_dominates_p (qty_comparison_code
[qty
],
5117 reverse_condition (code
))
5118 && GET_MODE_CLASS (mode_arg0
) == MODE_INT
))
5119 && (rtx_equal_p (qty_comparison_const
[qty
], folded_arg1
)
5121 && rtx_equal_p (qty_comparison_const
[qty
],
5123 || (GET_CODE (folded_arg1
) == REG
5124 && (reg_qty
[REGNO (folded_arg1
)]
5125 == qty_comparison_qty
[qty
]))))
5126 return (comparison_dominates_p (qty_comparison_code
[qty
],
5133 /* If we are comparing against zero, see if the first operand is
5134 equivalent to an IOR with a constant. If so, we may be able to
5135 determine the result of this comparison. */
5137 if (const_arg1
== const0_rtx
)
5139 rtx y
= lookup_as_function (folded_arg0
, IOR
);
5143 && (inner_const
= equiv_constant (XEXP (y
, 1))) != 0
5144 && GET_CODE (inner_const
) == CONST_INT
5145 && INTVAL (inner_const
) != 0)
5147 int sign_bitnum
= GET_MODE_BITSIZE (mode_arg0
) - 1;
5148 int has_sign
= (HOST_BITS_PER_WIDE_INT
>= sign_bitnum
5149 && (INTVAL (inner_const
)
5150 & ((HOST_WIDE_INT
) 1 << sign_bitnum
)));
5151 rtx
true = const_true_rtx
, false = const0_rtx
;
5153 #ifdef FLOAT_STORE_FLAG_VALUE
5154 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
5156 true = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE
, mode
);
5157 false = CONST0_RTX (mode
);
5179 new = simplify_relational_operation (code
, mode_arg0
,
5180 const_arg0
? const_arg0
: folded_arg0
,
5181 const_arg1
? const_arg1
: folded_arg1
);
5182 #ifdef FLOAT_STORE_FLAG_VALUE
5183 if (new != 0 && GET_MODE_CLASS (mode
) == MODE_FLOAT
)
5184 new = ((new == const0_rtx
) ? CONST0_RTX (mode
)
5185 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE
, mode
));
5194 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5195 with that LABEL_REF as its second operand. If so, the result is
5196 the first operand of that MINUS. This handles switches with an
5197 ADDR_DIFF_VEC table. */
5198 if (const_arg1
&& GET_CODE (const_arg1
) == LABEL_REF
)
5200 rtx y
= lookup_as_function (folded_arg0
, MINUS
);
5202 if (y
!= 0 && GET_CODE (XEXP (y
, 1)) == LABEL_REF
5203 && XEXP (XEXP (y
, 1), 0) == XEXP (const_arg1
, 0))
5209 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5210 If so, produce (PLUS Z C2-C). */
5211 if (const_arg1
!= 0 && GET_CODE (const_arg1
) == CONST_INT
)
5213 rtx y
= lookup_as_function (XEXP (x
, 0), PLUS
);
5214 if (y
&& GET_CODE (XEXP (y
, 1)) == CONST_INT
)
5215 return fold_rtx (plus_constant (y
, -INTVAL (const_arg1
)),
5219 /* ... fall through ... */
5222 case SMIN
: case SMAX
: case UMIN
: case UMAX
:
5223 case IOR
: case AND
: case XOR
:
5224 case MULT
: case DIV
: case UDIV
:
5225 case ASHIFT
: case LSHIFTRT
: case ASHIFTRT
:
5226 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5227 is known to be of similar form, we may be able to replace the
5228 operation with a combined operation. This may eliminate the
5229 intermediate operation if every use is simplified in this way.
5230 Note that the similar optimization done by combine.c only works
5231 if the intermediate operation's result has only one reference. */
5233 if (GET_CODE (folded_arg0
) == REG
5234 && const_arg1
&& GET_CODE (const_arg1
) == CONST_INT
)
5237 = (code
== ASHIFT
|| code
== ASHIFTRT
|| code
== LSHIFTRT
);
5238 rtx y
= lookup_as_function (folded_arg0
, code
);
5240 enum rtx_code associate_code
;
5244 || 0 == (inner_const
5245 = equiv_constant (fold_rtx (XEXP (y
, 1), 0)))
5246 || GET_CODE (inner_const
) != CONST_INT
5247 /* If we have compiled a statement like
5248 "if (x == (x & mask1))", and now are looking at
5249 "x & mask2", we will have a case where the first operand
5250 of Y is the same as our first operand. Unless we detect
5251 this case, an infinite loop will result. */
5252 || XEXP (y
, 0) == folded_arg0
)
5255 /* Don't associate these operations if they are a PLUS with the
5256 same constant and it is a power of two. These might be doable
5257 with a pre- or post-increment. Similarly for two subtracts of
5258 identical powers of two with post decrement. */
5260 if (code
== PLUS
&& INTVAL (const_arg1
) == INTVAL (inner_const
)
5262 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5263 || exact_log2 (INTVAL (const_arg1
)) >= 0
5265 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5266 || exact_log2 (- INTVAL (const_arg1
)) >= 0
5271 /* Compute the code used to compose the constants. For example,
5272 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5275 = (code
== MULT
|| code
== DIV
|| code
== UDIV
? MULT
5276 : is_shift
|| code
== PLUS
|| code
== MINUS
? PLUS
: code
);
5278 new_const
= simplify_binary_operation (associate_code
, mode
,
5279 const_arg1
, inner_const
);
5284 /* If we are associating shift operations, don't let this
5285 produce a shift of larger than the object. This could
5286 occur when we following a sign-extend by a right shift on
5287 a machine that does a sign-extend as a pair of shifts. */
5289 if (is_shift
&& GET_CODE (new_const
) == CONST_INT
5290 && INTVAL (new_const
) > GET_MODE_BITSIZE (mode
))
5293 y
= copy_rtx (XEXP (y
, 0));
5295 /* If Y contains our first operand (the most common way this
5296 can happen is if Y is a MEM), we would do into an infinite
5297 loop if we tried to fold it. So don't in that case. */
5299 if (! reg_mentioned_p (folded_arg0
, y
))
5300 y
= fold_rtx (y
, insn
);
5302 return cse_gen_binary (code
, mode
, y
, new_const
);
5306 new = simplify_binary_operation (code
, mode
,
5307 const_arg0
? const_arg0
: folded_arg0
,
5308 const_arg1
? const_arg1
: folded_arg1
);
5312 /* (lo_sum (high X) X) is simply X. */
5313 if (code
== LO_SUM
&& const_arg0
!= 0
5314 && GET_CODE (const_arg0
) == HIGH
5315 && rtx_equal_p (XEXP (const_arg0
, 0), const_arg1
))
5321 new = simplify_ternary_operation (code
, mode
, mode_arg0
,
5322 const_arg0
? const_arg0
: folded_arg0
,
5323 const_arg1
? const_arg1
: folded_arg1
,
5324 const_arg2
? const_arg2
: XEXP (x
, 2));
5328 return new ? new : x
;
5331 /* Return a constant value currently equivalent to X.
5332 Return 0 if we don't know one. */
5338 if (GET_CODE (x
) == REG
5339 && REGNO_QTY_VALID_P (REGNO (x
))
5340 && qty_const
[reg_qty
[REGNO (x
)]])
5341 x
= gen_lowpart_if_possible (GET_MODE (x
), qty_const
[reg_qty
[REGNO (x
)]]);
5343 if (x
!= 0 && CONSTANT_P (x
))
5346 /* If X is a MEM, try to fold it outside the context of any insn to see if
5347 it might be equivalent to a constant. That handles the case where it
5348 is a constant-pool reference. Then try to look it up in the hash table
5349 in case it is something whose value we have seen before. */
5351 if (GET_CODE (x
) == MEM
)
5353 struct table_elt
*elt
;
5355 x
= fold_rtx (x
, NULL_RTX
);
5359 elt
= lookup (x
, safe_hash (x
, GET_MODE (x
)) % NBUCKETS
, GET_MODE (x
));
5363 for (elt
= elt
->first_same_value
; elt
; elt
= elt
->next_same_value
)
5364 if (elt
->is_const
&& CONSTANT_P (elt
->exp
))
5371 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5372 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5373 least-significant part of X.
5374 MODE specifies how big a part of X to return.
5376 If the requested operation cannot be done, 0 is returned.
5378 This is similar to gen_lowpart in emit-rtl.c. */
5381 gen_lowpart_if_possible (mode
, x
)
5382 enum machine_mode mode
;
5385 rtx result
= gen_lowpart_common (mode
, x
);
5389 else if (GET_CODE (x
) == MEM
)
5391 /* This is the only other case we handle. */
5392 register int offset
= 0;
5395 #if WORDS_BIG_ENDIAN
5396 offset
= (MAX (GET_MODE_SIZE (GET_MODE (x
)), UNITS_PER_WORD
)
5397 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
));
5399 #if BYTES_BIG_ENDIAN
5400 /* Adjust the address so that the address-after-the-data
5402 offset
-= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
))
5403 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (x
))));
5405 new = gen_rtx (MEM
, mode
, plus_constant (XEXP (x
, 0), offset
));
5406 if (! memory_address_p (mode
, XEXP (new, 0)))
5408 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x
);
5409 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x
);
5410 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x
);
5417 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5418 branch. It will be zero if not.
5420 In certain cases, this can cause us to add an equivalence. For example,
5421 if we are following the taken case of
5423 we can add the fact that `i' and '2' are now equivalent.
5425 In any case, we can record that this comparison was passed. If the same
5426 comparison is seen later, we will know its value. */
5429 record_jump_equiv (insn
, taken
)
5433 int cond_known_true
;
5435 enum machine_mode mode
, mode0
, mode1
;
5436 int reversed_nonequality
= 0;
5439 /* Ensure this is the right kind of insn. */
5440 if (! condjump_p (insn
) || simplejump_p (insn
))
5443 /* See if this jump condition is known true or false. */
5445 cond_known_true
= (XEXP (SET_SRC (PATTERN (insn
)), 2) == pc_rtx
);
5447 cond_known_true
= (XEXP (SET_SRC (PATTERN (insn
)), 1) == pc_rtx
);
5449 /* Get the type of comparison being done and the operands being compared.
5450 If we had to reverse a non-equality condition, record that fact so we
5451 know that it isn't valid for floating-point. */
5452 code
= GET_CODE (XEXP (SET_SRC (PATTERN (insn
)), 0));
5453 op0
= fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn
)), 0), 0), insn
);
5454 op1
= fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn
)), 0), 1), insn
);
5456 code
= find_comparison_args (code
, &op0
, &op1
, &mode0
, &mode1
);
5457 if (! cond_known_true
)
5459 reversed_nonequality
= (code
!= EQ
&& code
!= NE
);
5460 code
= reverse_condition (code
);
5463 /* The mode is the mode of the non-constant. */
5465 if (mode1
!= VOIDmode
)
5468 record_jump_cond (code
, mode
, op0
, op1
, reversed_nonequality
);
5471 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5472 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
5473 Make any useful entries we can with that information. Called from
5474 above function and called recursively. */
5477 record_jump_cond (code
, mode
, op0
, op1
, reversed_nonequality
)
5479 enum machine_mode mode
;
5481 int reversed_nonequality
;
5483 int op0_hash_code
, op1_hash_code
;
5484 int op0_in_memory
, op0_in_struct
, op1_in_memory
, op1_in_struct
;
5485 struct table_elt
*op0_elt
, *op1_elt
;
5487 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5488 we know that they are also equal in the smaller mode (this is also
5489 true for all smaller modes whether or not there is a SUBREG, but
5490 is not worth testing for with no SUBREG. */
5492 if (code
== EQ
&& GET_CODE (op0
) == SUBREG
5493 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0
))))
5495 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (op0
));
5496 rtx tem
= gen_lowpart_if_possible (inner_mode
, op1
);
5498 record_jump_cond (code
, mode
, SUBREG_REG (op0
),
5499 tem
? tem
: gen_rtx (SUBREG
, inner_mode
, op1
, 0),
5500 reversed_nonequality
);
5503 if (code
== EQ
&& GET_CODE (op1
) == SUBREG
5504 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1
))))
5506 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (op1
));
5507 rtx tem
= gen_lowpart_if_possible (inner_mode
, op0
);
5509 record_jump_cond (code
, mode
, SUBREG_REG (op1
),
5510 tem
? tem
: gen_rtx (SUBREG
, inner_mode
, op0
, 0),
5511 reversed_nonequality
);
5514 /* Similarly, if this is an NE comparison, and either is a SUBREG
5515 making a smaller mode, we know the whole thing is also NE. */
5517 if (code
== NE
&& GET_CODE (op0
) == SUBREG
5518 && subreg_lowpart_p (op0
)
5519 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0
))))
5521 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (op0
));
5522 rtx tem
= gen_lowpart_if_possible (inner_mode
, op1
);
5524 record_jump_cond (code
, mode
, SUBREG_REG (op0
),
5525 tem
? tem
: gen_rtx (SUBREG
, inner_mode
, op1
, 0),
5526 reversed_nonequality
);
5529 if (code
== NE
&& GET_CODE (op1
) == SUBREG
5530 && subreg_lowpart_p (op1
)
5531 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1
))))
5533 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (op1
));
5534 rtx tem
= gen_lowpart_if_possible (inner_mode
, op0
);
5536 record_jump_cond (code
, mode
, SUBREG_REG (op1
),
5537 tem
? tem
: gen_rtx (SUBREG
, inner_mode
, op0
, 0),
5538 reversed_nonequality
);
5541 /* Hash both operands. */
5544 hash_arg_in_memory
= 0;
5545 hash_arg_in_struct
= 0;
5546 op0_hash_code
= HASH (op0
, mode
);
5547 op0_in_memory
= hash_arg_in_memory
;
5548 op0_in_struct
= hash_arg_in_struct
;
5554 hash_arg_in_memory
= 0;
5555 hash_arg_in_struct
= 0;
5556 op1_hash_code
= HASH (op1
, mode
);
5557 op1_in_memory
= hash_arg_in_memory
;
5558 op1_in_struct
= hash_arg_in_struct
;
5563 /* Look up both operands. */
5564 op0_elt
= lookup (op0
, op0_hash_code
, mode
);
5565 op1_elt
= lookup (op1
, op1_hash_code
, mode
);
5567 /* If we aren't setting two things equal all we can do is save this
5568 comparison. Similarly if this is floating-point. In the latter
5569 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5570 If we record the equality, we might inadvertently delete code
5571 whose intent was to change -0 to +0. */
5573 if (code
!= EQ
|| GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
5575 /* If we reversed a floating-point comparison, if OP0 is not a
5576 register, or if OP1 is neither a register or constant, we can't
5579 if (GET_CODE (op1
) != REG
)
5580 op1
= equiv_constant (op1
);
5582 if ((reversed_nonequality
&& GET_MODE_CLASS (mode
) != MODE_INT
)
5583 || GET_CODE (op0
) != REG
|| op1
== 0)
5586 /* Put OP0 in the hash table if it isn't already. This gives it a
5587 new quantity number. */
5590 if (insert_regs (op0
, NULL_PTR
, 0))
5592 rehash_using_reg (op0
);
5593 op0_hash_code
= HASH (op0
, mode
);
5596 op0_elt
= insert (op0
, NULL_PTR
, op0_hash_code
, mode
);
5597 op0_elt
->in_memory
= op0_in_memory
;
5598 op0_elt
->in_struct
= op0_in_struct
;
5601 qty_comparison_code
[reg_qty
[REGNO (op0
)]] = code
;
5602 if (GET_CODE (op1
) == REG
)
5604 /* Put OP1 in the hash table so it gets a new quantity number. */
5607 if (insert_regs (op1
, NULL_PTR
, 0))
5609 rehash_using_reg (op1
);
5610 op1_hash_code
= HASH (op1
, mode
);
5613 op1_elt
= insert (op1
, NULL_PTR
, op1_hash_code
, mode
);
5614 op1_elt
->in_memory
= op1_in_memory
;
5615 op1_elt
->in_struct
= op1_in_struct
;
5618 qty_comparison_qty
[reg_qty
[REGNO (op0
)]] = reg_qty
[REGNO (op1
)];
5619 qty_comparison_const
[reg_qty
[REGNO (op0
)]] = 0;
5623 qty_comparison_qty
[reg_qty
[REGNO (op0
)]] = -1;
5624 qty_comparison_const
[reg_qty
[REGNO (op0
)]] = op1
;
5630 /* If both are equivalent, merge the two classes. Save this class for
5631 `cse_set_around_loop'. */
5632 if (op0_elt
&& op1_elt
)
5634 merge_equiv_classes (op0_elt
, op1_elt
);
5635 last_jump_equiv_class
= op0_elt
;
5638 /* For whichever side doesn't have an equivalence, make one. */
5641 if (insert_regs (op0
, op1_elt
, 0))
5643 rehash_using_reg (op0
);
5644 op0_hash_code
= HASH (op0
, mode
);
5647 op0_elt
= insert (op0
, op1_elt
, op0_hash_code
, mode
);
5648 op0_elt
->in_memory
= op0_in_memory
;
5649 op0_elt
->in_struct
= op0_in_struct
;
5650 last_jump_equiv_class
= op0_elt
;
5655 if (insert_regs (op1
, op0_elt
, 0))
5657 rehash_using_reg (op1
);
5658 op1_hash_code
= HASH (op1
, mode
);
5661 op1_elt
= insert (op1
, op0_elt
, op1_hash_code
, mode
);
5662 op1_elt
->in_memory
= op1_in_memory
;
5663 op1_elt
->in_struct
= op1_in_struct
;
5664 last_jump_equiv_class
= op1_elt
;
5668 /* CSE processing for one instruction.
5669 First simplify sources and addresses of all assignments
5670 in the instruction, using previously-computed equivalents values.
5671 Then install the new sources and destinations in the table
5672 of available values.
5674 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
5677 /* Data on one SET contained in the instruction. */
5681 /* The SET rtx itself. */
5683 /* The SET_SRC of the rtx (the original value, if it is changing). */
5685 /* The hash-table element for the SET_SRC of the SET. */
5686 struct table_elt
*src_elt
;
5687 /* Hash code for the SET_SRC. */
5689 /* Hash code for the SET_DEST. */
5691 /* The SET_DEST, with SUBREG, etc., stripped. */
5693 /* Place where the pointer to the INNER_DEST was found. */
5694 rtx
*inner_dest_loc
;
5695 /* Nonzero if the SET_SRC is in memory. */
5697 /* Nonzero if the SET_SRC is in a structure. */
5699 /* Nonzero if the SET_SRC contains something
5700 whose value cannot be predicted and understood. */
5702 /* Original machine mode, in case it becomes a CONST_INT. */
5703 enum machine_mode mode
;
5704 /* A constant equivalent for SET_SRC, if any. */
5706 /* Hash code of constant equivalent for SET_SRC. */
5707 int src_const_hash_code
;
5708 /* Table entry for constant equivalent for SET_SRC, if any. */
5709 struct table_elt
*src_const_elt
;
5713 cse_insn (insn
, in_libcall_block
)
5715 int in_libcall_block
;
5717 register rtx x
= PATTERN (insn
);
5720 register int n_sets
= 0;
5722 /* Records what this insn does to set CC0. */
5723 rtx this_insn_cc0
= 0;
5724 enum machine_mode this_insn_cc0_mode
;
5725 struct write_data writes_memory
;
5726 static struct write_data init
= {0, 0, 0, 0};
5729 struct table_elt
*src_eqv_elt
= 0;
5730 int src_eqv_volatile
;
5731 int src_eqv_in_memory
;
5732 int src_eqv_in_struct
;
5733 int src_eqv_hash_code
;
5738 writes_memory
= init
;
5740 /* Find all the SETs and CLOBBERs in this instruction.
5741 Record all the SETs in the array `set' and count them.
5742 Also determine whether there is a CLOBBER that invalidates
5743 all memory references, or all references at varying addresses. */
5745 if (GET_CODE (x
) == SET
)
5747 sets
= (struct set
*) alloca (sizeof (struct set
));
5750 /* Ignore SETs that are unconditional jumps.
5751 They never need cse processing, so this does not hurt.
5752 The reason is not efficiency but rather
5753 so that we can test at the end for instructions
5754 that have been simplified to unconditional jumps
5755 and not be misled by unchanged instructions
5756 that were unconditional jumps to begin with. */
5757 if (SET_DEST (x
) == pc_rtx
5758 && GET_CODE (SET_SRC (x
)) == LABEL_REF
)
5761 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
5762 The hard function value register is used only once, to copy to
5763 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
5764 Ensure we invalidate the destination register. On the 80386 no
5765 other code would invalidate it since it is a fixed_reg.
5766 We need not check the return of apply_change_group; see canon_reg. */
5768 else if (GET_CODE (SET_SRC (x
)) == CALL
)
5770 canon_reg (SET_SRC (x
), insn
);
5771 apply_change_group ();
5772 fold_rtx (SET_SRC (x
), insn
);
5773 invalidate (SET_DEST (x
));
5778 else if (GET_CODE (x
) == PARALLEL
)
5780 register int lim
= XVECLEN (x
, 0);
5782 sets
= (struct set
*) alloca (lim
* sizeof (struct set
));
5784 /* Find all regs explicitly clobbered in this insn,
5785 and ensure they are not replaced with any other regs
5786 elsewhere in this insn.
5787 When a reg that is clobbered is also used for input,
5788 we should presume that that is for a reason,
5789 and we should not substitute some other register
5790 which is not supposed to be clobbered.
5791 Therefore, this loop cannot be merged into the one below
5792 because a CALL may precede a CLOBBER and refer to the
5793 value clobbered. We must not let a canonicalization do
5794 anything in that case. */
5795 for (i
= 0; i
< lim
; i
++)
5797 register rtx y
= XVECEXP (x
, 0, i
);
5798 if (GET_CODE (y
) == CLOBBER
5799 && (GET_CODE (XEXP (y
, 0)) == REG
5800 || GET_CODE (XEXP (y
, 0)) == SUBREG
))
5801 invalidate (XEXP (y
, 0));
5804 for (i
= 0; i
< lim
; i
++)
5806 register rtx y
= XVECEXP (x
, 0, i
);
5807 if (GET_CODE (y
) == SET
)
5809 /* As above, we ignore unconditional jumps and call-insns and
5810 ignore the result of apply_change_group. */
5811 if (GET_CODE (SET_SRC (y
)) == CALL
)
5813 canon_reg (SET_SRC (y
), insn
);
5814 apply_change_group ();
5815 fold_rtx (SET_SRC (y
), insn
);
5816 invalidate (SET_DEST (y
));
5818 else if (SET_DEST (y
) == pc_rtx
5819 && GET_CODE (SET_SRC (y
)) == LABEL_REF
)
5822 sets
[n_sets
++].rtl
= y
;
5824 else if (GET_CODE (y
) == CLOBBER
)
5826 /* If we clobber memory, take note of that,
5827 and canon the address.
5828 This does nothing when a register is clobbered
5829 because we have already invalidated the reg. */
5830 if (GET_CODE (XEXP (y
, 0)) == MEM
)
5832 canon_reg (XEXP (y
, 0), NULL_RTX
);
5833 note_mem_written (XEXP (y
, 0), &writes_memory
);
5836 else if (GET_CODE (y
) == USE
5837 && ! (GET_CODE (XEXP (y
, 0)) == REG
5838 && REGNO (XEXP (y
, 0)) < FIRST_PSEUDO_REGISTER
))
5839 canon_reg (y
, NULL_RTX
);
5840 else if (GET_CODE (y
) == CALL
)
5842 /* The result of apply_change_group can be ignored; see
5844 canon_reg (y
, insn
);
5845 apply_change_group ();
5850 else if (GET_CODE (x
) == CLOBBER
)
5852 if (GET_CODE (XEXP (x
, 0)) == MEM
)
5854 canon_reg (XEXP (x
, 0), NULL_RTX
);
5855 note_mem_written (XEXP (x
, 0), &writes_memory
);
5859 /* Canonicalize a USE of a pseudo register or memory location. */
5860 else if (GET_CODE (x
) == USE
5861 && ! (GET_CODE (XEXP (x
, 0)) == REG
5862 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
))
5863 canon_reg (XEXP (x
, 0), NULL_RTX
);
5864 else if (GET_CODE (x
) == CALL
)
5866 /* The result of apply_change_group can be ignored; see canon_reg. */
5867 canon_reg (x
, insn
);
5868 apply_change_group ();
5872 if (n_sets
== 1 && REG_NOTES (insn
) != 0)
5874 /* Store the equivalent value in SRC_EQV, if different. */
5875 rtx tem
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
);
5877 if (tem
&& ! rtx_equal_p (XEXP (tem
, 0), SET_SRC (sets
[0].rtl
)))
5878 src_eqv
= canon_reg (XEXP (tem
, 0), NULL_RTX
);
5881 /* Canonicalize sources and addresses of destinations.
5882 We do this in a separate pass to avoid problems when a MATCH_DUP is
5883 present in the insn pattern. In that case, we want to ensure that
5884 we don't break the duplicate nature of the pattern. So we will replace
5885 both operands at the same time. Otherwise, we would fail to find an
5886 equivalent substitution in the loop calling validate_change below.
5888 We used to suppress canonicalization of DEST if it appears in SRC,
5889 but we don't do this any more. */
5891 for (i
= 0; i
< n_sets
; i
++)
5893 rtx dest
= SET_DEST (sets
[i
].rtl
);
5894 rtx src
= SET_SRC (sets
[i
].rtl
);
5895 rtx
new = canon_reg (src
, insn
);
5897 if ((GET_CODE (new) == REG
&& GET_CODE (src
) == REG
5898 && ((REGNO (new) < FIRST_PSEUDO_REGISTER
)
5899 != (REGNO (src
) < FIRST_PSEUDO_REGISTER
)))
5900 || insn_n_dups
[recog_memoized (insn
)] > 0)
5901 validate_change (insn
, &SET_SRC (sets
[i
].rtl
), new, 1);
5903 SET_SRC (sets
[i
].rtl
) = new;
5905 if (GET_CODE (dest
) == ZERO_EXTRACT
|| GET_CODE (dest
) == SIGN_EXTRACT
)
5907 validate_change (insn
, &XEXP (dest
, 1),
5908 canon_reg (XEXP (dest
, 1), insn
), 1);
5909 validate_change (insn
, &XEXP (dest
, 2),
5910 canon_reg (XEXP (dest
, 2), insn
), 1);
5913 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
5914 || GET_CODE (dest
) == ZERO_EXTRACT
5915 || GET_CODE (dest
) == SIGN_EXTRACT
)
5916 dest
= XEXP (dest
, 0);
5918 if (GET_CODE (dest
) == MEM
)
5919 canon_reg (dest
, insn
);
5922 /* Now that we have done all the replacements, we can apply the change
5923 group and see if they all work. Note that this will cause some
5924 canonicalizations that would have worked individually not to be applied
5925 because some other canonicalization didn't work, but this should not
5928 The result of apply_change_group can be ignored; see canon_reg. */
5930 apply_change_group ();
5932 /* Set sets[i].src_elt to the class each source belongs to.
5933 Detect assignments from or to volatile things
5934 and set set[i] to zero so they will be ignored
5935 in the rest of this function.
5937 Nothing in this loop changes the hash table or the register chains. */
5939 for (i
= 0; i
< n_sets
; i
++)
5941 register rtx src
, dest
;
5942 register rtx src_folded
;
5943 register struct table_elt
*elt
= 0, *p
;
5944 enum machine_mode mode
;
5947 rtx src_related
= 0;
5948 struct table_elt
*src_const_elt
= 0;
5949 int src_cost
= 10000, src_eqv_cost
= 10000, src_folded_cost
= 10000;
5950 int src_related_cost
= 10000, src_elt_cost
= 10000;
5951 /* Set non-zero if we need to call force_const_mem on with the
5952 contents of src_folded before using it. */
5953 int src_folded_force_flag
= 0;
5955 dest
= SET_DEST (sets
[i
].rtl
);
5956 src
= SET_SRC (sets
[i
].rtl
);
5958 /* If SRC is a constant that has no machine mode,
5959 hash it with the destination's machine mode.
5960 This way we can keep different modes separate. */
5962 mode
= GET_MODE (src
) == VOIDmode
? GET_MODE (dest
) : GET_MODE (src
);
5963 sets
[i
].mode
= mode
;
5967 enum machine_mode eqvmode
= mode
;
5968 if (GET_CODE (dest
) == STRICT_LOW_PART
)
5969 eqvmode
= GET_MODE (SUBREG_REG (XEXP (dest
, 0)));
5971 hash_arg_in_memory
= 0;
5972 hash_arg_in_struct
= 0;
5973 src_eqv
= fold_rtx (src_eqv
, insn
);
5974 src_eqv_hash_code
= HASH (src_eqv
, eqvmode
);
5976 /* Find the equivalence class for the equivalent expression. */
5979 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash_code
, eqvmode
);
5981 src_eqv_volatile
= do_not_record
;
5982 src_eqv_in_memory
= hash_arg_in_memory
;
5983 src_eqv_in_struct
= hash_arg_in_struct
;
5986 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5987 value of the INNER register, not the destination. So it is not
5988 a legal substitution for the source. But save it for later. */
5989 if (GET_CODE (dest
) == STRICT_LOW_PART
)
5992 src_eqv_here
= src_eqv
;
5994 /* Simplify and foldable subexpressions in SRC. Then get the fully-
5995 simplified result, which may not necessarily be valid. */
5996 src_folded
= fold_rtx (src
, insn
);
5998 /* If storing a constant in a bitfield, pre-truncate the constant
5999 so we will be able to record it later. */
6000 if (GET_CODE (SET_DEST (sets
[i
].rtl
)) == ZERO_EXTRACT
6001 || GET_CODE (SET_DEST (sets
[i
].rtl
)) == SIGN_EXTRACT
)
6003 rtx width
= XEXP (SET_DEST (sets
[i
].rtl
), 1);
6005 if (GET_CODE (src
) == CONST_INT
6006 && GET_CODE (width
) == CONST_INT
6007 && INTVAL (width
) < HOST_BITS_PER_WIDE_INT
6008 && (INTVAL (src
) & ((HOST_WIDE_INT
) (-1) << INTVAL (width
))))
6010 = GEN_INT (INTVAL (src
) & (((HOST_WIDE_INT
) 1
6011 << INTVAL (width
)) - 1));
6014 /* Compute SRC's hash code, and also notice if it
6015 should not be recorded at all. In that case,
6016 prevent any further processing of this assignment. */
6018 hash_arg_in_memory
= 0;
6019 hash_arg_in_struct
= 0;
6022 sets
[i
].src_hash_code
= HASH (src
, mode
);
6023 sets
[i
].src_volatile
= do_not_record
;
6024 sets
[i
].src_in_memory
= hash_arg_in_memory
;
6025 sets
[i
].src_in_struct
= hash_arg_in_struct
;
6028 /* It is no longer clear why we used to do this, but it doesn't
6029 appear to still be needed. So let's try without it since this
6030 code hurts cse'ing widened ops. */
6031 /* If source is a perverse subreg (such as QI treated as an SI),
6032 treat it as volatile. It may do the work of an SI in one context
6033 where the extra bits are not being used, but cannot replace an SI
6035 if (GET_CODE (src
) == SUBREG
6036 && (GET_MODE_SIZE (GET_MODE (src
))
6037 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src
)))))
6038 sets
[i
].src_volatile
= 1;
6041 /* Locate all possible equivalent forms for SRC. Try to replace
6042 SRC in the insn with each cheaper equivalent.
6044 We have the following types of equivalents: SRC itself, a folded
6045 version, a value given in a REG_EQUAL note, or a value related
6048 Each of these equivalents may be part of an additional class
6049 of equivalents (if more than one is in the table, they must be in
6050 the same class; we check for this).
6052 If the source is volatile, we don't do any table lookups.
6054 We note any constant equivalent for possible later use in a
6057 if (!sets
[i
].src_volatile
)
6058 elt
= lookup (src
, sets
[i
].src_hash_code
, mode
);
6060 sets
[i
].src_elt
= elt
;
6062 if (elt
&& src_eqv_here
&& src_eqv_elt
)
6064 if (elt
->first_same_value
!= src_eqv_elt
->first_same_value
)
6066 /* The REG_EQUAL is indicating that two formerly distinct
6067 classes are now equivalent. So merge them. */
6068 merge_equiv_classes (elt
, src_eqv_elt
);
6069 src_eqv_hash_code
= HASH (src_eqv
, elt
->mode
);
6070 src_eqv_elt
= lookup (src_eqv
, src_eqv_hash_code
, elt
->mode
);
6076 else if (src_eqv_elt
)
6079 /* Try to find a constant somewhere and record it in `src_const'.
6080 Record its table element, if any, in `src_const_elt'. Look in
6081 any known equivalences first. (If the constant is not in the
6082 table, also set `sets[i].src_const_hash_code'). */
6084 for (p
= elt
->first_same_value
; p
; p
= p
->next_same_value
)
6088 src_const_elt
= elt
;
6093 && (CONSTANT_P (src_folded
)
6094 /* Consider (minus (label_ref L1) (label_ref L2)) as
6095 "constant" here so we will record it. This allows us
6096 to fold switch statements when an ADDR_DIFF_VEC is used. */
6097 || (GET_CODE (src_folded
) == MINUS
6098 && GET_CODE (XEXP (src_folded
, 0)) == LABEL_REF
6099 && GET_CODE (XEXP (src_folded
, 1)) == LABEL_REF
)))
6100 src_const
= src_folded
, src_const_elt
= elt
;
6101 else if (src_const
== 0 && src_eqv_here
&& CONSTANT_P (src_eqv_here
))
6102 src_const
= src_eqv_here
, src_const_elt
= src_eqv_elt
;
6104 /* If we don't know if the constant is in the table, get its
6105 hash code and look it up. */
6106 if (src_const
&& src_const_elt
== 0)
6108 sets
[i
].src_const_hash_code
= HASH (src_const
, mode
);
6109 src_const_elt
= lookup (src_const
, sets
[i
].src_const_hash_code
,
6113 sets
[i
].src_const
= src_const
;
6114 sets
[i
].src_const_elt
= src_const_elt
;
6116 /* If the constant and our source are both in the table, mark them as
6117 equivalent. Otherwise, if a constant is in the table but the source
6118 isn't, set ELT to it. */
6119 if (src_const_elt
&& elt
6120 && src_const_elt
->first_same_value
!= elt
->first_same_value
)
6121 merge_equiv_classes (elt
, src_const_elt
);
6122 else if (src_const_elt
&& elt
== 0)
6123 elt
= src_const_elt
;
6125 /* See if there is a register linearly related to a constant
6126 equivalent of SRC. */
6128 && (GET_CODE (src_const
) == CONST
6129 || (src_const_elt
&& src_const_elt
->related_value
!= 0)))
6131 src_related
= use_related_value (src_const
, src_const_elt
);
6134 struct table_elt
*src_related_elt
6135 = lookup (src_related
, HASH (src_related
, mode
), mode
);
6136 if (src_related_elt
&& elt
)
6138 if (elt
->first_same_value
6139 != src_related_elt
->first_same_value
)
6140 /* This can occur when we previously saw a CONST
6141 involving a SYMBOL_REF and then see the SYMBOL_REF
6142 twice. Merge the involved classes. */
6143 merge_equiv_classes (elt
, src_related_elt
);
6146 src_related_elt
= 0;
6148 else if (src_related_elt
&& elt
== 0)
6149 elt
= src_related_elt
;
6153 /* See if we have a CONST_INT that is already in a register in a
6156 if (src_const
&& src_related
== 0 && GET_CODE (src_const
) == CONST_INT
6157 && GET_MODE_CLASS (mode
) == MODE_INT
6158 && GET_MODE_BITSIZE (mode
) < BITS_PER_WORD
)
6160 enum machine_mode wider_mode
;
6162 for (wider_mode
= GET_MODE_WIDER_MODE (mode
);
6163 GET_MODE_BITSIZE (wider_mode
) <= BITS_PER_WORD
6164 && src_related
== 0;
6165 wider_mode
= GET_MODE_WIDER_MODE (wider_mode
))
6167 struct table_elt
*const_elt
6168 = lookup (src_const
, HASH (src_const
, wider_mode
), wider_mode
);
6173 for (const_elt
= const_elt
->first_same_value
;
6174 const_elt
; const_elt
= const_elt
->next_same_value
)
6175 if (GET_CODE (const_elt
->exp
) == REG
)
6177 src_related
= gen_lowpart_if_possible (mode
,
6184 /* Another possibility is that we have an AND with a constant in
6185 a mode narrower than a word. If so, it might have been generated
6186 as part of an "if" which would narrow the AND. If we already
6187 have done the AND in a wider mode, we can use a SUBREG of that
6190 if (flag_expensive_optimizations
&& ! src_related
6191 && GET_CODE (src
) == AND
&& GET_CODE (XEXP (src
, 1)) == CONST_INT
6192 && GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
6194 enum machine_mode tmode
;
6195 rtx new_and
= gen_rtx (AND
, VOIDmode
, NULL_RTX
, XEXP (src
, 1));
6197 for (tmode
= GET_MODE_WIDER_MODE (mode
);
6198 GET_MODE_SIZE (tmode
) <= UNITS_PER_WORD
;
6199 tmode
= GET_MODE_WIDER_MODE (tmode
))
6201 rtx inner
= gen_lowpart_if_possible (tmode
, XEXP (src
, 0));
6202 struct table_elt
*larger_elt
;
6206 PUT_MODE (new_and
, tmode
);
6207 XEXP (new_and
, 0) = inner
;
6208 larger_elt
= lookup (new_and
, HASH (new_and
, tmode
), tmode
);
6209 if (larger_elt
== 0)
6212 for (larger_elt
= larger_elt
->first_same_value
;
6213 larger_elt
; larger_elt
= larger_elt
->next_same_value
)
6214 if (GET_CODE (larger_elt
->exp
) == REG
)
6217 = gen_lowpart_if_possible (mode
, larger_elt
->exp
);
6227 if (src
== src_folded
)
6230 /* At this point, ELT, if non-zero, points to a class of expressions
6231 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6232 and SRC_RELATED, if non-zero, each contain additional equivalent
6233 expressions. Prune these latter expressions by deleting expressions
6234 already in the equivalence class.
6236 Check for an equivalent identical to the destination. If found,
6237 this is the preferred equivalent since it will likely lead to
6238 elimination of the insn. Indicate this by placing it in
6241 if (elt
) elt
= elt
->first_same_value
;
6242 for (p
= elt
; p
; p
= p
->next_same_value
)
6244 enum rtx_code code
= GET_CODE (p
->exp
);
6246 /* If the expression is not valid, ignore it. Then we do not
6247 have to check for validity below. In most cases, we can use
6248 `rtx_equal_p', since canonicalization has already been done. */
6249 if (code
!= REG
&& ! exp_equiv_p (p
->exp
, p
->exp
, 1, 0))
6252 if (src
&& GET_CODE (src
) == code
&& rtx_equal_p (src
, p
->exp
))
6254 else if (src_folded
&& GET_CODE (src_folded
) == code
6255 && rtx_equal_p (src_folded
, p
->exp
))
6257 else if (src_eqv_here
&& GET_CODE (src_eqv_here
) == code
6258 && rtx_equal_p (src_eqv_here
, p
->exp
))
6260 else if (src_related
&& GET_CODE (src_related
) == code
6261 && rtx_equal_p (src_related
, p
->exp
))
6264 /* This is the same as the destination of the insns, we want
6265 to prefer it. Copy it to src_related. The code below will
6266 then give it a negative cost. */
6267 if (GET_CODE (dest
) == code
&& rtx_equal_p (p
->exp
, dest
))
6272 /* Find the cheapest valid equivalent, trying all the available
6273 possibilities. Prefer items not in the hash table to ones
6274 that are when they are equal cost. Note that we can never
6275 worsen an insn as the current contents will also succeed.
6276 If we find an equivalent identical to the destination, use it as best,
6277 since this insn will probably be eliminated in that case. */
6280 if (rtx_equal_p (src
, dest
))
6283 src_cost
= COST (src
);
6288 if (rtx_equal_p (src_eqv_here
, dest
))
6291 src_eqv_cost
= COST (src_eqv_here
);
6296 if (rtx_equal_p (src_folded
, dest
))
6297 src_folded_cost
= -1;
6299 src_folded_cost
= COST (src_folded
);
6304 if (rtx_equal_p (src_related
, dest
))
6305 src_related_cost
= -1;
6307 src_related_cost
= COST (src_related
);
6310 /* If this was an indirect jump insn, a known label will really be
6311 cheaper even though it looks more expensive. */
6312 if (dest
== pc_rtx
&& src_const
&& GET_CODE (src_const
) == LABEL_REF
)
6313 src_folded
= src_const
, src_folded_cost
= -1;
  /* Terminate loop when replacement made.  This must terminate since
     the current contents will be tested and will always be valid.  */
  while (1)
    {
      rtx trial;

      /* Skip invalid entries.  */
      while (elt && GET_CODE (elt->exp) != REG
             && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
        elt = elt->next_same_value;

      if (elt) src_elt_cost = elt->cost;

      /* Find cheapest and skip it for the next time.  For items
         of equal cost, use this order:
         src_folded, src, src_eqv, src_related and hash table entry.  */
      if (src_folded_cost <= src_cost
          && src_folded_cost <= src_eqv_cost
          && src_folded_cost <= src_related_cost
          && src_folded_cost <= src_elt_cost)
        {
          trial = src_folded, src_folded_cost = 10000;
          if (src_folded_force_flag)
            trial = force_const_mem (mode, trial);
        }
      else if (src_cost <= src_eqv_cost
               && src_cost <= src_related_cost
               && src_cost <= src_elt_cost)
        trial = src, src_cost = 10000;
      else if (src_eqv_cost <= src_related_cost
               && src_eqv_cost <= src_elt_cost)
        trial = src_eqv_here, src_eqv_cost = 10000;
      else if (src_related_cost <= src_elt_cost)
        trial = src_related, src_related_cost = 10000;
      else
        {
          trial = copy_rtx (elt->exp);
          elt = elt->next_same_value;
          src_elt_cost = 10000;
        }
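      /* A standalone sketch of the selection discipline used above (not
         compiler code): charging a candidate a prohibitive cost once it
         has been tried makes repeated scans enumerate the candidates in
         increasing cost order without keeping a sorted list.

             #define EXHAUSTED 10000

             static int
             pick_cheapest (costs, n)
                  int *costs;
                  int n;
             {
               int best = 0, i;
               for (i = 1; i < n; i++)
                 if (costs[i] < costs[best])
                   best = i;
               costs[best] = EXHAUSTED;
               return best;
             }

         Each call returns the cheapest remaining candidate, much as each
         trip around this loop tries src_folded, src, src_eqv, src_related
         and then successive hash table entries.  */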
      /* We don't normally have an insn matching (set (pc) (pc)), so
         check for this separately here.  We will delete such an
         insn below.

         Tablejump insns contain a USE of the table, so simply replacing
         the operand with the constant won't match.  This is simply an
         unconditional branch, however, and is therefore valid.  Just
         insert the substitution here and we will delete and re-emit
         the insn later.  */

      if (n_sets == 1 && dest == pc_rtx
          && (trial == pc_rtx
              || (GET_CODE (trial) == LABEL_REF
                  && ! condjump_p (insn))))
        {
          /* If TRIAL is a label in front of a jump table, we are
             really falling through the switch (this is how casesi
             insns work), so we must branch around the table.  */
          if (GET_CODE (trial) == CODE_LABEL
              && NEXT_INSN (trial) != 0
              && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
              && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
            trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));

          SET_SRC (sets[i].rtl) = trial;
          break;
        }
      /* Look for a substitution that makes a valid insn.  */
      else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
        {
          /* The result of apply_change_group can be ignored; see
             canon_reg.  */

          validate_change (insn, &SET_SRC (sets[i].rtl),
                           canon_reg (SET_SRC (sets[i].rtl), insn),
                           1);
          apply_change_group ();
          break;
        }
      /* If we previously found constant pool entries for
         constants and this is a constant, try making a
         pool entry.  Put it in src_folded unless we already have done
         this since that is where it likely came from.  */

      else if (constant_pool_entries_cost
               && CONSTANT_P (trial)
               && (src_folded == 0 || GET_CODE (src_folded) != MEM)
               && GET_MODE_CLASS (mode) != MODE_CC)
        {
          src_folded_force_flag = 1;
          src_folded = trial;
          src_folded_cost = constant_pool_entries_cost;
        }
    }

  src = SET_SRC (sets[i].rtl);
  /* In general, it is good to have a SET with SET_SRC == SET_DEST.
     However, there is an important exception:  If both are registers
     that are not the head of their equivalence class, replace SET_SRC
     with the head of the class.  If we do not do this, we will have
     both registers live over a portion of the basic block.  This way,
     their lifetimes will likely abut instead of overlapping.  */
  if (GET_CODE (dest) == REG
      && REGNO_QTY_VALID_P (REGNO (dest))
      && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
      && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
      && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
      /* Don't do this if the original insn had a hard reg as
         SET_SRC.  */
      && (GET_CODE (sets[i].src) != REG
          || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
    /* We can't call canon_reg here because it won't do anything if
       SRC is a hard register.  */
    {
      int first = qty_first_reg[reg_qty[REGNO (src)]];

      src = SET_SRC (sets[i].rtl)
        = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
          : gen_rtx (REG, GET_MODE (src), first);
    }
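  /* For instance (pseudo-register numbers invented for illustration):
     if (reg 70) and (reg 71) hold the same quantity and (reg 70) is the
     head of the class, then rewriting

         (set (reg 71) (reg 71))   into   (set (reg 71) (reg 70))

     lets the lifetime of (reg 71) begin where that of (reg 70) ends,
     instead of keeping both registers live over the same stretch of the
     basic block.  */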
  /* If we had a constant that is cheaper than what we are now
     setting SRC to, use that constant.  We ignored it when we
     thought we could make this into a no-op.  */
  if (src_const && COST (src_const) < COST (src)
      && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
    src = src_const;

  /* If we made a change, recompute SRC values.  */
  if (src != sets[i].src)
    {
      do_not_record = 0;
      hash_arg_in_memory = 0;
      hash_arg_in_struct = 0;
      sets[i].src = src;
      sets[i].src_hash_code = HASH (src, mode);
      sets[i].src_volatile = do_not_record;
      sets[i].src_in_memory = hash_arg_in_memory;
      sets[i].src_in_struct = hash_arg_in_struct;
      sets[i].src_elt = lookup (src, sets[i].src_hash_code, mode);
    }
  /* If this is a single SET, we are setting a register, and we have an
     equivalent constant, we want to add a REG_NOTE.  We don't want
     to write a REG_EQUAL note for a constant pseudo since verifying that
     that pseudo hasn't been eliminated is a pain.  Such a note also
     won't help anything.  */
  if (n_sets == 1 && src_const && GET_CODE (dest) == REG
      && GET_CODE (src_const) != REG)
    {
      rtx tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);

      /* Record the actual constant value in a REG_EQUAL note, making
         a new one if one does not already exist.  */
      if (tem)
        XEXP (tem, 0) = src_const;
      else
        REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
                                    src_const, REG_NOTES (insn));

      /* If storing a constant value in a register that
         previously held the constant value 0,
         record this fact with a REG_WAS_0 note on this insn.

         Note that the *register* is required to have previously held 0,
         not just any register in the quantity and we must point to the
         insn that set that register to zero.

         Rather than track each register individually, we just see if
         the last set for this quantity was for this register.  */

      if (REGNO_QTY_VALID_P (REGNO (dest))
          && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
        {
          /* See if we previously had a REG_WAS_0 note.  */
          rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
          rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];

          if ((tem = single_set (const_insn)) != 0
              && rtx_equal_p (SET_DEST (tem), dest))
            {
              if (note)
                XEXP (note, 0) = const_insn;
              else
                REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
                                            const_insn, REG_NOTES (insn));
            }
        }
    }
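  /* As an example (insn and register numbers invented): given

         insn 10:  (set (reg 65) (const_int 0))
         ...
         insn 14:  (set (reg 65) (const_int 7))

     insn 14 gets (insn_list REG_WAS_0 (insn 10)), provided insn 10
     performed the last store into reg 65's quantity, so that
     machine-specific code can exploit the register having just held
     zero when it outputs insn 14.  */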
  /* Now deal with the destination.  */
  do_not_record = 0;
  sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);

  /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
     to the MEM or REG within it.  */
  while (GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == STRICT_LOW_PART)
    {
      sets[i].inner_dest_loc = &XEXP (dest, 0);
      dest = XEXP (dest, 0);
    }

  sets[i].inner_dest = dest;

  if (GET_CODE (dest) == MEM)
    {
      dest = fold_rtx (dest, insn);

      /* Decide whether we invalidate everything in memory,
         or just things at non-fixed places.
         Writing a large aggregate must invalidate everything
         because we don't know how long it is.  */
      note_mem_written (dest, &writes_memory);
    }
  /* Compute the hash code of the destination now,
     before the effects of this instruction are recorded,
     since the register values used in the address computation
     are those before this instruction.  */
  sets[i].dest_hash_code = HASH (dest, mode);

  /* Don't enter a bit-field in the hash table
     because the value in it after the store
     may not equal what was stored, due to truncation.  */

  if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
    {
      rtx width = XEXP (SET_DEST (sets[i].rtl), 1);

      if (src_const != 0 && GET_CODE (src_const) == CONST_INT
          && GET_CODE (width) == CONST_INT
          && INTVAL (width) < HOST_BITS_PER_WIDE_INT
          && ! (INTVAL (src_const)
                & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
        /* Exception: if the value is constant,
           and it won't be truncated, record it.  */
        ;
      else
        {
          /* This is chosen so that the destination will be invalidated
             but no new value will be recorded.
             We must invalidate because sometimes constant
             values can be recorded for bitfields.  */
          sets[i].src_elt = 0;
          sets[i].src_volatile = 1;
          src_eqv = 0;
          src_eqv_elt = 0;
        }
    }
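  /* The width test above is plain bit arithmetic: a constant VAL can be
     recorded for a WIDTH-bit field only if no bit at or above position
     WIDTH is set.  A standalone sketch of the same test (not compiler
     code):

         static int
         fits_in_field (val, width)
              HOST_WIDE_INT val;
              int width;
         {
           return (width < HOST_BITS_PER_WIDE_INT
                   && (val & ((HOST_WIDE_INT) (-1) << width)) == 0);
         }

     For example, VAL == 5 fails for WIDTH == 2, since storing 5 into a
     2-bit field reads back as 1, not 5.  */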
  /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
     the insn entirely.  */
  else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
    {
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
      NOTE_SOURCE_FILE (insn) = 0;
      cse_jumps_altered = 1;
      /* One less use of the label this insn used to jump to.  */
      --LABEL_NUSES (JUMP_LABEL (insn));
      /* No more processing for this set.  */
      sets[i].rtl = 0;
    }
  /* If this SET is now setting PC to a label, we know it used to
     be a conditional or computed branch.  So we see if we can follow
     it.  If it was a computed branch, delete it and re-emit.  */
  else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
    {
      rtx p;

      /* If this is not in the format for a simple branch and
         we are the only SET in it, re-emit it.  */
      if (! simplejump_p (insn) && n_sets == 1)
        {
          rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
          JUMP_LABEL (new) = XEXP (src, 0);
          LABEL_NUSES (XEXP (src, 0))++;
          delete_insn (insn);
          insn = new;
        }
      else
        /* Otherwise, force rerecognition, since it probably had
           a different pattern before.
           This shouldn't really be necessary, since whatever
           changed the source value above should have done this.
           Until the right place is found, might as well do this here.  */
        INSN_CODE (insn) = -1;

      /* Now that we've converted this jump to an unconditional jump,
         there is dead code after it.  Delete the dead code until we
         reach a BARRIER, the end of the function, or a label.  Do
         not delete NOTEs except for NOTE_INSN_DELETED since later
         phases assume these notes are retained.  */

      p = insn;

      while (NEXT_INSN (p) != 0
             && GET_CODE (NEXT_INSN (p)) != BARRIER
             && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
        {
          if (GET_CODE (NEXT_INSN (p)) != NOTE
              || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
            delete_insn (NEXT_INSN (p));
          else
            p = NEXT_INSN (p);
        }

      /* If we don't have a BARRIER immediately after INSN, put one there.
         Much code assumes that there are no NOTEs between a JUMP_INSN and
         a BARRIER.  */

      if (NEXT_INSN (insn) == 0
          || GET_CODE (NEXT_INSN (insn)) != BARRIER)
        emit_barrier_after (insn);

      /* We might have two BARRIERs separated by notes.  Delete the second
         one if so.  */

      if (p != insn && NEXT_INSN (p) != 0
          && GET_CODE (NEXT_INSN (p)) == BARRIER)
        delete_insn (NEXT_INSN (p));

      cse_jumps_altered = 1;
      sets[i].rtl = 0;
    }
  /* If destination is volatile, invalidate it and then do no further
     processing for this assignment.  */

  else if (do_not_record)
    {
      if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
          || GET_CODE (dest) == MEM)
        invalidate (dest);
      sets[i].rtl = 0;
    }

  if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
    sets[i].dest_hash_code = HASH (SET_DEST (sets[i].rtl), mode);
#ifdef HAVE_cc0
  /* If setting CC0, record what it was set to, or a constant, if it
     is equivalent to a constant.  If it is being set to a floating-point
     value, make a COMPARE with the appropriate constant of 0.  If we
     don't do this, later code can interpret this as a test against
     const0_rtx, which can cause problems if we try to put it into an
     insn as a floating-point operand.  */
  if (dest == cc0_rtx)
    {
      this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
      this_insn_cc0_mode = mode;
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
                                 CONST0_RTX (mode));
    }
#endif
    }
  /* Now enter all non-volatile source expressions in the hash table
     if they are not already present.
     Record their equivalence classes in src_elt.
     This way we can insert the corresponding destinations into
     the same classes even if the actual sources are no longer in them
     (having been invalidated).  */

  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
    {
      register struct table_elt *elt;
      register struct table_elt *classp = sets[0].src_elt;
      rtx dest = SET_DEST (sets[0].rtl);
      enum machine_mode eqvmode = GET_MODE (dest);

      if (GET_CODE (dest) == STRICT_LOW_PART)
        {
          eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
          classp = 0;
        }
      if (insert_regs (src_eqv, classp, 0))
        src_eqv_hash_code = HASH (src_eqv, eqvmode);
      elt = insert (src_eqv, classp, src_eqv_hash_code, eqvmode);
      elt->in_memory = src_eqv_in_memory;
      elt->in_struct = src_eqv_in_struct;
      src_eqv_elt = elt;
    }
  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl && ! sets[i].src_volatile
        && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
      {
        if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
          {
            /* REG_EQUAL in setting a STRICT_LOW_PART
               gives an equivalent for the entire destination register,
               not just for the subreg being stored in now.
               This is a more interesting equivalence, so we arrange later
               to treat the entire reg as the destination.  */
            sets[i].src_elt = src_eqv_elt;
            sets[i].src_hash_code = src_eqv_hash_code;
          }
        else
          {
            /* Insert source and constant equivalent into hash table, if not
               already present.  */

            register struct table_elt *classp = src_eqv_elt;
            register rtx src = sets[i].src;
            register rtx dest = SET_DEST (sets[i].rtl);
            enum machine_mode mode
              = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);

            if (sets[i].src_elt == 0)
              {
                register struct table_elt *elt;

                /* Note that these insert_regs calls cannot remove
                   any of the src_elt's, because they would have failed to
                   match if not still valid.  */
                if (insert_regs (src, classp, 0))
                  sets[i].src_hash_code = HASH (src, mode);
                elt = insert (src, classp, sets[i].src_hash_code, mode);
                elt->in_memory = sets[i].src_in_memory;
                elt->in_struct = sets[i].src_in_struct;
                sets[i].src_elt = classp = elt;
              }

            if (sets[i].src_const && sets[i].src_const_elt == 0
                && src != sets[i].src_const
                && ! rtx_equal_p (sets[i].src_const, src))
              sets[i].src_elt = insert (sets[i].src_const, classp,
                                        sets[i].src_const_hash_code, mode);
          }
      }
    else if (sets[i].src_elt == 0)
      /* If we did not insert the source into the hash table (e.g., it was
         volatile), note the equivalence class for the REG_EQUAL value, if any,
         so that the destination goes into that class.  */
      sets[i].src_elt = src_eqv_elt;

  invalidate_from_clobbers (&writes_memory, x);
  /* Some registers are invalidated by subroutine calls.  Memory is
     invalidated by non-constant calls.  */

  if (GET_CODE (insn) == CALL_INSN)
    {
      static struct write_data everything = {0, 1, 1, 1};

      if (! CONST_CALL_P (insn))
        invalidate_memory (&everything);
      invalidate_for_call ();
    }
  /* Now invalidate everything set by this instruction.
     If a SUBREG or other funny destination is being set,
     sets[i].rtl is still nonzero, so here we invalidate the reg
     a part of which is being set.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        register rtx dest = sets[i].inner_dest;

        /* Needed for registers to remove the register from its
           previous quantity's chain.
           Needed for memory if this is a nonvarying address, unless
           we have just done an invalidate_memory that covers even those.  */
        if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
            || (! writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
          invalidate (dest);
      }
  /* Make sure registers mentioned in destinations
     are safe for use in an expression to be inserted.
     This removes from the hash table
     any invalid entry that refers to one of these registers.

     We don't care about the return value from mention_regs because
     we are going to hash the SET_DEST values unconditionally.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
      mention_regs (SET_DEST (sets[i].rtl));
  /* We may have just removed some of the src_elt's from the hash table.
     So replace each one with the current head of the same class.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
          /* If elt was removed, find current head of same class,
             or 0 if nothing remains of that class.  */
          {
            register struct table_elt *elt = sets[i].src_elt;

            while (elt && elt->prev_same_value)
              elt = elt->prev_same_value;

            while (elt && elt->first_same_value == 0)
              elt = elt->next_same_value;
            sets[i].src_elt = elt ? elt->first_same_value : 0;
          }
      }
  /* Now insert the destinations into their equivalence classes.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        register rtx dest = SET_DEST (sets[i].rtl);
        register struct table_elt *elt;

        /* Don't record value if we are not supposed to risk allocating
           floating-point values in registers that might be wider than
           memory.  */
        if ((flag_float_store
             && GET_CODE (dest) == MEM
             && GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
            /* Don't record values of destinations set inside a libcall block
               since we might delete the libcall.  Things should have been set
               up so we won't want to reuse such a value, but we play it safe
               here.  */
            || in_libcall_block
            /* If we didn't put a REG_EQUAL value or a source into the hash
               table, there is no point in recording DEST.  */
            || sets[i].src_elt == 0)
          continue;

        /* STRICT_LOW_PART isn't part of the value BEING set,
           and neither is the SUBREG inside it.
           Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
        if (GET_CODE (dest) == STRICT_LOW_PART)
          dest = SUBREG_REG (XEXP (dest, 0));

        if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
          /* Registers must also be inserted into chains for quantities.  */
          if (insert_regs (dest, sets[i].src_elt, 1))
            /* If `insert_regs' changes something, the hash code must be
               recalculated.  */
            sets[i].dest_hash_code = HASH (dest, GET_MODE (dest));

        elt = insert (dest, sets[i].src_elt,
                      sets[i].dest_hash_code, GET_MODE (dest));
        elt->in_memory = GET_CODE (sets[i].inner_dest) == MEM;
        if (elt->in_memory)
          {
            /* This implicitly assumes a whole struct
               need not have MEM_IN_STRUCT_P.
               But a whole struct is *supposed* to have MEM_IN_STRUCT_P.  */
            elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
                              || sets[i].inner_dest != SET_DEST (sets[i].rtl));
          }
        /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
           narrower than M2, and both M1 and M2 are the same number of words,
           we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
           make that equivalence as well.

           However, BAR may have equivalences for which gen_lowpart_if_possible
           will produce a simpler value than gen_lowpart_if_possible applied to
           BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
           BAR's equivalences.  If we don't get a simplified form, make
           the SUBREG.  It will not be used in an equivalence, but will
           cause two similar assignments to be detected.

           Note the loop below will find SUBREG_REG (DEST) since we have
           already entered SRC and DEST of the SET in the table.  */

        if (GET_CODE (dest) == SUBREG
            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) / UNITS_PER_WORD
                == GET_MODE_SIZE (GET_MODE (dest)) / UNITS_PER_WORD)
            && (GET_MODE_SIZE (GET_MODE (dest))
                >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
            && sets[i].src_elt != 0)
          {
            enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
            struct table_elt *elt, *classp = 0;

            for (elt = sets[i].src_elt->first_same_value; elt;
                 elt = elt->next_same_value)
              {
                rtx new_src = 0;
                unsigned src_hash;
                struct table_elt *src_elt;

                /* Ignore invalid entries.  */
                if (GET_CODE (elt->exp) != REG
                    && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
                  continue;

                new_src = gen_lowpart_if_possible (new_mode, elt->exp);
                if (new_src == 0)
                  new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);

                src_hash = HASH (new_src, new_mode);
                src_elt = lookup (new_src, src_hash, new_mode);

                /* Put the new source in the hash table if it isn't
                   already there.  */
                if (src_elt == 0)
                  {
                    if (insert_regs (new_src, classp, 0))
                      src_hash = HASH (new_src, new_mode);
                    src_elt = insert (new_src, classp, src_hash, new_mode);
                    src_elt->in_memory = elt->in_memory;
                    src_elt->in_struct = elt->in_struct;
                  }
                else if (classp && classp != src_elt->first_same_value)
                  /* Show that two things that we've seen before are
                     actually the same.  */
                  merge_equiv_classes (src_elt, classp);

                classp = src_elt->first_same_value;
              }
          }
      }
  /* Special handling for (set REG0 REG1)
     where REG0 is the "cheapest", cheaper than REG1.
     After cse, REG1 will probably not be used in the sequel,
     so (if easily done) change this insn to (set REG1 REG0) and
     replace REG1 with REG0 in the previous insn that computed their value.
     Then REG1 will become a dead store and won't cloud the situation
     for later optimizations.

     Do not make this change if REG1 is a hard register, because it will
     then be used in the sequel and we may be changing a two-operand insn
     into a three-operand insn.

     Also do not do this if we are operating on a copy of INSN.  */

  if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
      && NEXT_INSN (PREV_INSN (insn)) == insn
      && GET_CODE (SET_SRC (sets[0].rtl)) == REG
      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
      && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
          == REGNO (SET_DEST (sets[0].rtl))))
    {
      rtx prev = PREV_INSN (insn);
      while (prev && GET_CODE (prev) == NOTE)
        prev = PREV_INSN (prev);

      if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
          && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
        {
          rtx dest = SET_DEST (sets[0].rtl);
          rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);

          validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
          validate_change (insn, & SET_DEST (sets[0].rtl),
                           SET_SRC (sets[0].rtl), 1);
          validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
          apply_change_group ();

          /* If REG1 was equivalent to a constant, REG0 is not.  */
          if (note)
            PUT_REG_NOTE_KIND (note, REG_EQUAL);

          /* If there was a REG_WAS_0 note on PREV, remove it.  Move
             any REG_WAS_0 note on INSN to PREV.  */
          note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
          if (note)
            remove_note (prev, note);

          note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
          if (note)
            {
              remove_note (insn, note);
              XEXP (note, 1) = REG_NOTES (prev);
              REG_NOTES (prev) = note;
            }
        }
    }
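  /* Concretely (pseudo-register and insn numbers invented):

         insn 20:  (set (reg 66) (plus (reg 60) (reg 61)))
         insn 21:  (set (reg 65) (reg 66))

     when (reg 65) is the head of the class becomes

         insn 20:  (set (reg 65) (plus (reg 60) (reg 61)))
         insn 21:  (set (reg 66) (reg 65))

     and insn 21 is now a store into a probably-dead (reg 66), which
     delete_dead_from_cse can remove.  */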
  /* If this is a conditional jump insn, record any known equivalences due to
     the condition being tested.  */

  last_jump_equiv_class = 0;
  if (GET_CODE (insn) == JUMP_INSN
      && n_sets == 1 && GET_CODE (x) == SET
      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
    record_jump_equiv (insn, 0);

#ifdef HAVE_cc0
  /* If the previous insn set CC0 and this insn no longer references CC0,
     delete the previous insn.  Here we use the fact that nothing expects CC0
     to be valid over an insn, which is true until the final pass.  */
  if (prev_insn && GET_CODE (prev_insn) == INSN
      && (tem = single_set (prev_insn)) != 0
      && SET_DEST (tem) == cc0_rtx
      && ! reg_mentioned_p (cc0_rtx, x))
    {
      PUT_CODE (prev_insn, NOTE);
      NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
      NOTE_SOURCE_FILE (prev_insn) = 0;
    }

  prev_insn_cc0 = this_insn_cc0;
  prev_insn_cc0_mode = this_insn_cc0_mode;
#endif

  prev_insn = insn;
}
/* Store 1 in *WRITES_PTR for those categories of memory ref
   that must be invalidated when the expression WRITTEN is stored in.
   If WRITTEN is null, say everything must be invalidated.  */

static void
note_mem_written (written, writes_ptr)
     rtx written;
     struct write_data *writes_ptr;
{
  static struct write_data everything = {0, 1, 1, 1};

  if (written == 0)
    *writes_ptr = everything;
  else if (GET_CODE (written) == MEM)
    {
      /* Pushing or popping the stack invalidates just the stack pointer. */
      rtx addr = XEXP (written, 0);
      if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
           || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
          && GET_CODE (XEXP (addr, 0)) == REG
          && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
        {
          writes_ptr->sp = 1;
          return;
        }
      else if (GET_MODE (written) == BLKmode)
        *writes_ptr = everything;
      else if (cse_rtx_addr_varies_p (written))
        {
          /* A varying address that is a sum indicates an array element,
             and that's just as good as a structure element
             in implying that we need not invalidate scalar variables.
             However, we must allow QImode aliasing of scalars, because the
             ANSI C standard allows character pointers to alias anything.  */
          if (! ((MEM_IN_STRUCT_P (written)
                  || GET_CODE (XEXP (written, 0)) == PLUS)
                 && GET_MODE (written) != QImode))
            writes_ptr->all = 1;
          writes_ptr->nonscalar = 1;
        }
      else
        writes_ptr->var = 1;
    }
}
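/* The QImode exception above matters for source code such as the
   following sketch (not part of the compiler):

       int
       flip_low_byte (ip)
            int *ip;
       {
         char *cp = (char *) ip;
         *cp ^= 1;
         return *ip;
       }

   The byte store through CP is a QImode write whose address "varies",
   yet it must still invalidate any cached value of *IP, because ANSI C
   lets character pointers alias anything; hence a QImode write sets
   writes_ptr->all even when it looks like a struct or array access.  */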
/* Perform invalidation on the basis of everything about an insn
   except for invalidating the actual places that are SET in it.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.

   W points to the writes_memory for this insn, a struct write_data
   saying which kinds of memory references must be invalidated.
   X is the pattern of the insn.  */

static void
invalidate_from_clobbers (w, x)
     struct write_data *w;
     rtx x;
{
  /* If W->var is not set, W specifies no action.
     If W->all is set, this step gets all memory refs
     so they can be ignored in the rest of this function.  */
  if (w->var)
    invalidate_memory (w);

  if (w->sp)
    {
      if (reg_tick[STACK_POINTER_REGNUM] >= 0)
        reg_tick[STACK_POINTER_REGNUM]++;

      /* This should be *very* rare.  */
      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
        invalidate (stack_pointer_rtx);
    }

  if (GET_CODE (x) == CLOBBER)
    {
      rtx ref = XEXP (x, 0);
      if (ref
          && (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
              || (GET_CODE (ref) == MEM && ! w->all)))
        invalidate (ref);
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      register int i;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          register rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == CLOBBER)
            {
              rtx ref = XEXP (y, 0);
              if (ref
                  && (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
                      || (GET_CODE (ref) == MEM && ! w->all)))
                invalidate (ref);
            }
        }
    }
}
/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
   and replace any registers in them with either an equivalent constant
   or the canonical form of the register.  If we are inside an address,
   only do this if the address remains valid.

   OBJECT is 0 except when within a MEM in which case it is the MEM.

   Return the replacement for X.  */

static rtx
cse_process_notes (x, object)
     rtx x;
     rtx object;
{
  enum rtx_code code = GET_CODE (x);
  char *fmt = GET_RTX_FORMAT (code);
  int i;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
    case PC:
    case CC0:
      return x;

    case MEM:
      XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
      return x;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL)
        XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
      if (XEXP (x, 1))
        XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      {
        rtx new = cse_process_notes (XEXP (x, 0), object);
        /* We don't substitute VOIDmode constants into these rtx,
           since they would impede folding.  */
        if (GET_MODE (new) != VOIDmode)
          validate_change (object, &XEXP (x, 0), new, 0);
        return x;
      }

    case REG:
      i = reg_qty[REGNO (x)];

      /* Return a constant or a constant register.  */
      if (REGNO_QTY_VALID_P (REGNO (x))
          && qty_const[i] != 0
          && (CONSTANT_P (qty_const[i])
              || GET_CODE (qty_const[i]) == REG))
        {
          rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
          if (new)
            return new;
        }

      /* Otherwise, canonicalize this register.  */
      return canon_reg (x, NULL_RTX);
    }

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      validate_change (object, &XEXP (x, i),
                       cse_process_notes (XEXP (x, i), object), 0);

  return x;
}
/* Find common subexpressions between the end test of a loop and the beginning
   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.

   Often we have a loop where an expression in the exit test is used
   in the body of the loop.  For example "while (*p) *q++ = *p++;".
   Because of the way we duplicate the loop exit test in front of the loop,
   however, we don't detect that common subexpression.  This will be caught
   when global cse is implemented, but this is a quite common case.

   This function handles the most common cases of these common expressions.
   It is called after we have processed the basic block ending with the
   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
   jumps to a label used only once.  */

static void
cse_around_loop (loop_start)
     rtx loop_start;
{
  rtx insn;
  int i;
  struct table_elt *p;

  /* If the jump at the end of the loop doesn't go to the start, we don't
     do anything.  */
  for (insn = PREV_INSN (loop_start);
       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
       insn = PREV_INSN (insn))
    ;

  if (insn == 0
      || GET_CODE (insn) != NOTE
      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
    return;

  /* If the last insn of the loop (the end test) was an NE comparison,
     we will interpret it as an EQ comparison, since we fell through
     the loop.  Any equivalences resulting from that comparison are
     therefore not valid and must be invalidated.  */
  if (last_jump_equiv_class)
    for (p = last_jump_equiv_class->first_same_value; p;
         p = p->next_same_value)
      if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
          || GET_CODE (p->exp) == SUBREG)
        invalidate (p->exp);

  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).

     The only thing we do with SET_DEST is invalidate entries, so we
     can safely process each SET in order.  It is slightly less efficient
     to do so, but we only want to handle the most common cases.  */

  for (insn = NEXT_INSN (loop_start);
       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
       && ! (GET_CODE (insn) == NOTE
             && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
          && (GET_CODE (PATTERN (insn)) == SET
              || GET_CODE (PATTERN (insn)) == CLOBBER))
        cse_set_around_loop (PATTERN (insn), insn, loop_start);
      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
               && GET_CODE (PATTERN (insn)) == PARALLEL)
        for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
          if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
              || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
            cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
                                 loop_start);
    }
}
/* Variable used for communications between the next two routines.  */

static struct write_data skipped_writes_memory;

/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
   since they are done elsewhere.  This function is called via note_stores.  */

static void
invalidate_skipped_set (dest, set)
     rtx set;
     rtx dest;
{
  if (GET_CODE (set) == CLOBBER
#ifdef HAVE_cc0
      || dest == cc0_rtx
#endif
      || dest == pc_rtx)
    return;

  if (GET_CODE (dest) == MEM)
    note_mem_written (dest, &skipped_writes_memory);

  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
      || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
    invalidate (dest);
}
/* Invalidate all insns from START up to the end of the function or the
   next label.  This is called when we wish to CSE around a block that is
   conditionally executed.  */

static void
invalidate_skipped_block (start)
     rtx start;
{
  rtx insn;
  static struct write_data init = {0, 0, 0, 0};
  static struct write_data everything = {0, 1, 1, 1};

  for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
       insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
        continue;

      skipped_writes_memory = init;

      if (GET_CODE (insn) == CALL_INSN)
        {
          invalidate_for_call ();
          skipped_writes_memory = everything;
        }

      note_stores (PATTERN (insn), invalidate_skipped_set);
      invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
    }
}
/* Used for communication between the following two routines; contains a
   value to be checked for modification.  */

static rtx cse_check_loop_start_value;

/* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
   indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0.  */

static void
cse_check_loop_start (x, set)
     rtx x;
     rtx set;
{
  if (cse_check_loop_start_value == 0
      || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
    return;

  if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
      || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
    cse_check_loop_start_value = 0;
}
/* X is a SET or CLOBBER contained in INSN that was found near the start of
   a loop that starts with the label at LOOP_START.

   If X is a SET, we see if its SET_SRC is currently in our hash table.
   If so, we see if it has a value equal to some register used only in the
   loop exit code (as marked by jump.c).

   If those two conditions are true, we search backwards from the start of
   the loop to see if that same value was loaded into a register that still
   retains its value at the start of the loop.

   If so, we insert an insn after the load to copy the destination of that
   load into the equivalent register and (try to) replace our SET_SRC with that
   register.

   In any event, we invalidate whatever this SET or CLOBBER modifies.  */

static void
cse_set_around_loop (x, insn, loop_start)
     rtx x;
     rtx insn;
     rtx loop_start;
{
  struct table_elt *src_elt;
  static struct write_data init = {0, 0, 0, 0};
  struct write_data writes_memory;

  writes_memory = init;

  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
     are setting PC or CC0 or whose SET_SRC is already a register.  */
  if (GET_CODE (x) == SET
      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
      && GET_CODE (SET_SRC (x)) != REG)
    {
      src_elt = lookup (SET_SRC (x),
                        HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
                        GET_MODE (SET_DEST (x)));

      if (src_elt)
        for (src_elt = src_elt->first_same_value; src_elt;
             src_elt = src_elt->next_same_value)
          if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
              && COST (src_elt->exp) < COST (SET_SRC (x)))
            {
              rtx p, set;

              /* Look for an insn in front of LOOP_START that sets
                 something in the desired mode to SET_SRC (x) before we hit
                 a label or CALL_INSN.  */

              for (p = prev_nonnote_insn (loop_start);
                   p && GET_CODE (p) != CALL_INSN
                   && GET_CODE (p) != CODE_LABEL;
                   p = prev_nonnote_insn (p))
                if ((set = single_set (p)) != 0
                    && GET_CODE (SET_DEST (set)) == REG
                    && GET_MODE (SET_DEST (set)) == src_elt->mode
                    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
                  {
                    /* We now have to ensure that nothing between P
                       and LOOP_START modified anything referenced in
                       SET_SRC (x).  We know that nothing within the loop
                       can modify it, or we would have invalidated it in
                       the hash table.  */
                    rtx q;

                    cse_check_loop_start_value = SET_SRC (x);
                    for (q = p; q != loop_start; q = NEXT_INSN (q))
                      if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
                        note_stores (PATTERN (q), cse_check_loop_start);

                    /* If nothing was changed and we can replace our
                       SET_SRC, add an insn after P to copy its destination
                       to what we will be replacing SET_SRC with.  */
                    if (cse_check_loop_start_value
                        && validate_change (insn, &SET_SRC (x),
                                            src_elt->exp, 0))
                      emit_insn_after (gen_move_insn (src_elt->exp,
                                                      SET_DEST (set)),
                                       p);
                    break;
                  }
            }
    }

  /* Now invalidate anything modified by X.  */

  note_mem_written (SET_DEST (x), &writes_memory);

  if (writes_memory.var)
    invalidate_memory (&writes_memory);

  /* See comment on similar code in cse_insn for explanation of these tests. */
  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
      || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
          && ! cse_rtx_addr_varies_p (SET_DEST (x))))
    invalidate (SET_DEST (x));
}
/* Find the end of INSN's basic block and return its range,
   the total number of SETs in all the insns of the block, the last insn of the
   block, and the branch path.

   The branch path indicates which branches should be followed.  If a non-zero
   path size is specified, the block should be rescanned and a different set
   of branches will be taken.  The branch path is only used if
   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.

   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
   used to describe the block.  It is filled in with the information about
   the current block.  The incoming structure's branch path, if any, is used
   to construct the output branch path.  */

static void
cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
     rtx insn;
     struct cse_basic_block_data *data;
     int follow_jumps;
     int after_loop;
     int skip_blocks;
{
  rtx p = insn, q;
  int nsets = 0;
  int i;
  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
  rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
  int path_size = data->path_size;
  int path_entry = 0;

  /* Update the previous branch path, if any.  If the last branch was
     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
     shorten the path by one and look at the previous branch.  We know that
     at least one branch must have been taken if PATH_SIZE is non-zero.  */
  while (path_size > 0)
    {
      if (data->path[path_size - 1].status != NOT_TAKEN)
        {
          data->path[path_size - 1].status = NOT_TAKEN;
          break;
        }
      else
        path_size--;
    }

  /* Scan to end of this basic block.  */
  while (p && GET_CODE (p) != CODE_LABEL)
    {
      /* Don't cse out the end of a loop.  This makes a difference
         only for the unusual loops that always execute at least once;
         all other loops have labels there so we will stop in any case.
         Cse'ing out the end of the loop is dangerous because it
         might cause an invariant expression inside the loop
         to be reused after the end of the loop.  This would make it
         hard to move the expression out of the loop in loop.c,
         especially if it is one of several equivalent expressions
         and loop.c would like to eliminate it.

         If we are running after loop.c has finished, we can ignore
         the NOTE_INSN_LOOP_END.  */

      if (! after_loop && GET_CODE (p) == NOTE
          && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
        break;

      /* Don't cse over a call to setjmp; on some machines (eg vax)
         the regs restored by the longjmp come from
         a later time than the setjmp.  */
      if (GET_CODE (p) == NOTE
          && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
        break;

      /* A PARALLEL can have lots of SETs in it,
         especially if it is really an ASM_OPERANDS.  */
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
          && GET_CODE (PATTERN (p)) == PARALLEL)
        nsets += XVECLEN (PATTERN (p), 0);
      else if (GET_CODE (p) != NOTE)
        nsets += 1;

      /* Ignore insns made by CSE; they cannot affect the boundaries of
         the basic block.  */

      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
        high_cuid = INSN_CUID (p);
      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
        low_cuid = INSN_CUID (p);

      /* See if this insn is in our branch path.  If it is and we are to
         take it, do so.  */
      if (path_entry < path_size && data->path[path_entry].branch == p)
        {
          if (data->path[path_entry].status != NOT_TAKEN)
            p = JUMP_LABEL (p);

          /* Point to next entry in path, if any.  */
          path_entry++;
        }

      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
         was specified, we haven't reached our maximum path length, there are
         insns following the target of the jump, this is the only use of the
         jump label, and the target label is preceded by a BARRIER.

         Alternatively, we can follow the jump if it branches around a
         block of code and there are no other branches into the block.
         In this case invalidate_skipped_block will be called to invalidate any
         registers set in the block when following the jump.  */

      else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
               && GET_CODE (p) == JUMP_INSN
               && GET_CODE (PATTERN (p)) == SET
               && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
               && LABEL_NUSES (JUMP_LABEL (p)) == 1
               && NEXT_INSN (JUMP_LABEL (p)) != 0)
        {
          for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
            if ((GET_CODE (q) != NOTE
                 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
                 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
                && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
              break;

          /* If we ran into a BARRIER, this code is an extension of the
             basic block when the branch is taken.  */
          if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
            {
              /* Don't allow ourself to keep walking around an
                 always-executed loop.  */
              if (next_real_insn (q) == next)
                {
                  p = NEXT_INSN (p);
                  continue;
                }

              /* Similarly, don't put a branch in our path more than once.  */
              for (i = 0; i < path_entry; i++)
                if (data->path[i].branch == p)
                  break;

              if (i != path_entry)
                break;

              data->path[path_entry].branch = p;
              data->path[path_entry++].status = TAKEN;

              /* This branch now ends our path.  It was possible that we
                 didn't see this branch the last time around (when the
                 insn in front of the target was a JUMP_INSN that was
                 turned into a no-op).  */
              path_size = path_entry;

              p = JUMP_LABEL (p);
              /* Mark block so we won't scan it again later.  */
              PUT_MODE (NEXT_INSN (p), QImode);
            }
          /* Detect a branch around a block of code.  */
          else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
            {
              register rtx tmp;

              if (next_real_insn (q) == next)
                {
                  p = NEXT_INSN (p);
                  continue;
                }

              for (i = 0; i < path_entry; i++)
                if (data->path[i].branch == p)
                  break;

              if (i != path_entry)
                break;

              /* This is no_labels_between_p (p, q) with an added check for
                 reaching the end of a function (in case Q precedes P).  */
              for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
                if (GET_CODE (tmp) == CODE_LABEL)
                  break;

              if (tmp == q)
                {
                  data->path[path_entry].branch = p;
                  data->path[path_entry++].status = AROUND;

                  path_size = path_entry;

                  p = JUMP_LABEL (p);
                  /* Mark block so we won't scan it again later.  */
                  PUT_MODE (NEXT_INSN (p), QImode);
                }
            }
        }
      p = NEXT_INSN (p);
    }

  data->low_cuid = low_cuid;
  data->high_cuid = high_cuid;
  data->nsets = nsets;
  data->last = p;

  /* If all jumps in the path are not taken, set our path length to zero
     so a rescan won't be done.  */
  for (i = path_size - 1; i >= 0; i--)
    if (data->path[i].status != NOT_TAKEN)
      break;

  if (i == -1)
    data->path_size = 0;
  else
    data->path_size = path_size;

  /* End the current branch path.  */
  data->path[path_size].branch = 0;
}
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the function.

   AFTER_LOOP is 1 if this is the cse call done after loop optimization
   (only if -frerun-cse-after-loop).

   Returns 1 if jump_optimize should be redone due to simplifications
   in conditional jump instructions.  */

int
cse_main (f, nregs, after_loop, file)
     rtx f;
     int nregs;
     int after_loop;
     FILE *file;
{
  struct cse_basic_block_data val;
  register rtx insn = f;
  register int i;

  cse_jumps_altered = 0;
  constant_pool_entries_cost = 0;
  val.path_size = 0;

  init_recog ();

  max_reg = nregs;

  all_minus_one = (int *) alloca (nregs * sizeof (int));
  consec_ints = (int *) alloca (nregs * sizeof (int));

  for (i = 0; i < nregs; i++)
    {
      all_minus_one[i] = -1;
      consec_ints[i] = i;
    }

  reg_next_eqv = (int *) alloca (nregs * sizeof (int));
  reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
  reg_qty = (int *) alloca (nregs * sizeof (int));
  reg_in_table = (int *) alloca (nregs * sizeof (int));
  reg_tick = (int *) alloca (nregs * sizeof (int));

  /* Discard all the free elements of the previous function
     since they are allocated in the temporary obstack.  */
  bzero (table, sizeof table);
  free_element_chain = 0;
  n_elements_made = 0;

  /* Find the largest uid.  */

  max_uid = get_max_uid ();
  uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
  bzero (uid_cuid, (max_uid + 1) * sizeof (int));

  /* Compute the mapping from uids to cuids.
     CUIDs are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.
     Don't assign cuids to line-number NOTEs, so that the distance in cuids
     between two insns is not affected by -g.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE
          || NOTE_LINE_NUMBER (insn) < 0)
        INSN_CUID (insn) = ++i;
      else
        /* Give a line number note the same cuid as preceding insn.  */
        INSN_CUID (insn) = i;
    }
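  /* For example (uids invented), the insn stream

         uid 12 (insn)    uid 40 (line-number note)    uid 13 (insn)

     receives cuids 1, 1, 2: the note shares the cuid of the insn before
     it, and the two insns get consecutive cuids no matter how far apart
     their uids are, so cuid distances are the same with or without -g.  */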
  /* Initialize which registers are clobbered by calls.  */

  CLEAR_HARD_REG_SET (regs_invalidated_by_call);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((call_used_regs[i]
         /* Used to check !fixed_regs[i] here, but that isn't safe;
            fixed regs are still call-clobbered, and sched can get
            confused if they can "live across calls".

            The frame pointer is always preserved across calls.  The arg
            pointer is if it is fixed.  The stack pointer usually is, unless
            RETURN_POPS_ARGS, in which case an explicit CLOBBER
            will be present.  If we are generating PIC code, the PIC offset
            table register is preserved across calls.  */

         && i != STACK_POINTER_REGNUM
         && i != FRAME_POINTER_REGNUM
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
         && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#endif
#ifdef PIC_OFFSET_TABLE_REGNUM
         && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
         )
        || global_regs[i])
      SET_HARD_REG_BIT (regs_invalidated_by_call, i);

  /* Loop over basic blocks.
     Compute the maximum number of qty's needed for each basic block
     (which is 2 for each SET).  */
  insn = f;
  while (insn)
    {
      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
                              flag_cse_skip_blocks);

      /* If this basic block was already processed or has no sets, skip it.  */
      if (val.nsets == 0 || GET_MODE (insn) == QImode)
        {
          PUT_MODE (insn, VOIDmode);
          insn = (val.last ? NEXT_INSN (val.last) : 0);
          val.path_size = 0;
          continue;
        }

      cse_basic_block_start = val.low_cuid;
      cse_basic_block_end = val.high_cuid;
      max_qty = val.nsets * 2;

      if (file)
        fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
                 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
                 val.nsets);

      /* Make MAX_QTY bigger to give us room to optimize
         past the end of this basic block, if that should prove useful.  */
      if (max_qty < 500)
        max_qty = 500;

      max_qty += max_reg;

      /* If this basic block is being extended by following certain jumps,
         (see `cse_end_of_basic_block'), we reprocess the code from the start.
         Otherwise, we start after this basic block.  */
      if (val.path_size > 0)
        cse_basic_block (insn, val.last, val.path, 0);
      else
        {
          int old_cse_jumps_altered = cse_jumps_altered;
          rtx temp;

          /* When cse changes a conditional jump to an unconditional
             jump, we want to reprocess the block, since it will give
             us a new branch path to investigate.  */
          cse_jumps_altered = 0;
          temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
          if (cse_jumps_altered == 0
              || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
            insn = temp;

          cse_jumps_altered |= old_cse_jumps_altered;
        }
    }

  /* Tell refers_to_mem_p that qty_const info is not available.  */
  qty_const = 0;

  if (max_elements_made < n_elements_made)
    max_elements_made = n_elements_made;

  return cse_jumps_altered;
}
/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
   a null path when not following jumps.

   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
   loop.  This is true when we are being called for the last time on a
   block and this CSE pass is before loop.c.  */

static rtx
cse_basic_block (from, to, next_branch, around_loop)
     register rtx from, to;
     struct branch_path *next_branch;
     int around_loop;
{
  register rtx insn;
  int to_usage = 0;
  int in_libcall_block = 0;

  /* Each of these arrays is undefined before max_reg, so only allocate
     the space actually needed and adjust the start below.  */

  qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_mode
    = (enum machine_mode *) alloca ((max_qty - max_reg)
                                    * sizeof (enum machine_mode));
  qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_comparison_code
    = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
  qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));

  qty_first_reg -= max_reg;
  qty_last_reg -= max_reg;
  qty_mode -= max_reg;
  qty_const -= max_reg;
  qty_const_insn -= max_reg;
  qty_comparison_code -= max_reg;
  qty_comparison_qty -= max_reg;
  qty_comparison_const -= max_reg;

  new_basic_block ();

  /* TO might be a label.  If so, protect it from being deleted.  */
  if (to != 0 && GET_CODE (to) == CODE_LABEL)
    ++LABEL_NUSES (to);

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      register enum rtx_code code;

      /* See if this is a branch that is part of the path.  If so, and it is
         to be taken, do so.  */
      if (next_branch->branch == insn)
        {
          enum taken status = next_branch++->status;
          if (status != NOT_TAKEN)
            {
              if (status == TAKEN)
                record_jump_equiv (insn, 1);
              else
                invalidate_skipped_block (NEXT_INSN (insn));

              /* Set the last insn as the jump insn; it doesn't affect cc0.
                 Then follow this branch.  */
#ifdef HAVE_cc0
              prev_insn_cc0 = 0;
#endif
              prev_insn = insn;
              insn = JUMP_LABEL (insn);
              continue;
            }
        }

      code = GET_CODE (insn);
      if (GET_MODE (insn) == QImode)
        PUT_MODE (insn, VOIDmode);

      if (GET_RTX_CLASS (code) == 'i')
        {
          /* Process notes first so we have all notes in canonical forms when
             looking for duplicate operations.  */

          if (REG_NOTES (insn))
            REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);

          /* Track when we are inside a LIBCALL block.  Inside such a block,
             we do not want to record destinations.  The last insn of a
             LIBCALL block is not considered to be part of the block, since
             its destination is the result of the block and hence should be
             recorded.  */

          if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
            in_libcall_block = 1;
          else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
            in_libcall_block = 0;

          cse_insn (insn, in_libcall_block);
        }

      /* If INSN is now an unconditional jump, skip to the end of our
         basic block by pretending that we just did the last insn in the
         basic block.  If we are jumping to the end of our block, show
         that we can have one usage of TO.  */

      if (simplejump_p (insn))
        {
          if (to == 0)
            return 0;

          if (JUMP_LABEL (insn) == to)
            to_usage = 1;

          /* Maybe TO was deleted because the jump is unconditional.
             If so, there is nothing left in this basic block.  */
          /* ??? Perhaps it would be smarter to set TO
             to whatever follows this insn,
             and pretend the basic block had always ended here.  */
          if (INSN_DELETED_P (to))
            break;

          insn = PREV_INSN (to);
        }

      /* See if it is ok to keep on going past the label
         which used to end our basic block.  Remember that we incremented
         the count of that label, so we decrement it here.  If we made
         a jump unconditional, TO_USAGE will be one; in that case, we don't
         want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
          && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
        {
          struct cse_basic_block_data val;

          insn = NEXT_INSN (to);

          if (LABEL_NUSES (to) == 0)
            delete_insn (to);

          /* Find the end of the following block.  Note that we won't be
             following branches in this case.  If TO was the last insn
             in the function, we are done.  Similarly, if we deleted the
             insn after TO, it must have been because it was preceded by
             a BARRIER.  In that case, we are done with this block because it
             has no continuation.  */

          if (insn == 0 || INSN_DELETED_P (insn))
            break;

          to_usage = 0;
          val.path_size = 0;
          cse_end_of_basic_block (insn, &val, 0, 0, 0);

          /* If the tables we allocated have enough space left
             to handle all the SETs in the next basic block,
             continue through it.  Otherwise, return,
             and that block will be scanned individually.  */
          if (val.nsets * 2 + next_qty > max_qty)
            break;

          cse_basic_block_start = val.low_cuid;
          cse_basic_block_end = val.high_cuid;
          to = val.last;

          /* Prevent TO from being deleted if it is a label.  */
          if (to != 0 && GET_CODE (to) == CODE_LABEL)
            ++LABEL_NUSES (to);

          /* Back up so we process the first insn in the extension.  */
          insn = PREV_INSN (insn);
        }
    }

  if (next_qty > max_qty)
    abort ();

  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
     the previous insn is the only insn that branches to the head of a loop,
     we can cse into the loop.  Don't do this if we changed the jump
     structure of a loop unless we aren't going to be following jumps.  */

  if ((cse_jumps_altered == 0
       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
      && around_loop && to != 0
      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
      && GET_CODE (PREV_INSN (to)) == JUMP_INSN
      && JUMP_LABEL (PREV_INSN (to)) != 0
      && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
    cse_around_loop (JUMP_LABEL (PREV_INSN (to)));

  return to ? NEXT_INSN (to) : 0;
}
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.  */

static void
count_reg_usage (x, counts, incr)
     rtx x;
     int *counts;
     int incr;
{
  enum rtx_code code = GET_CODE (x);
  char *fmt;
  int i, j;

  switch (code)
    {
    case REG:
      counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case CLOBBER:
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
        count_reg_usage (SET_DEST (x), counts, incr);
      count_reg_usage (SET_SRC (x), counts, incr);
      return;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
      count_reg_usage (PATTERN (x), counts, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
         use them.  */

      if (REG_NOTES (x))
        count_reg_usage (REG_NOTES (x), counts, incr);
      return;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL)
        count_reg_usage (XEXP (x, 0), counts, incr);
      if (XEXP (x, 1))
        count_reg_usage (XEXP (x, 1), counts, incr);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        count_reg_usage (XEXP (x, i), counts, incr);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          count_reg_usage (XVECEXP (x, i, j), counts, incr);
    }
}
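/* Typical use (a usage note, not new code; see delete_dead_from_cse
   below): tally every use once with INCR == 1, then, when an insn is
   deleted during the backward scan, walk it again with INCR == -1 so
   the remaining counts stay exact:

       count_reg_usage (insn, counts, 1);
       ...
       count_reg_usage (insn, counts, -1);
*/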
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse.  It improves the
   heuristics in loop since it won't try to move dead invariants out of loops
   or make givs for dead quantities.  The remaining passes of the compilation
   are also sped up.  */

void
delete_dead_from_cse (insns, nreg)
     rtx insns;
     int nreg;
{
  int *counts = (int *) alloca (nreg * sizeof (int));
  rtx insn, prev;
  rtx tem;
  int i;
  int in_libcall = 0;

  /* First count the number of times each register is used.  */
  bzero (counts, sizeof (int) * nreg);
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.  */
  for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
    {
      int live_insn = 0;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block.
         Flow or loop might get confused if we did that.  Remember
         that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
        in_libcall = 1;

      if (in_libcall)
        live_insn = 1;
      else if (GET_CODE (PATTERN (insn)) == SET)
        {
          if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
            ;

#ifdef HAVE_cc0
          else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
                   && ! side_effects_p (SET_SRC (PATTERN (insn)))
                   && ((tem = next_nonnote_insn (insn)) == 0
                       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
                       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
            ;
#endif
          else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
                   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
                   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
                   || side_effects_p (SET_SRC (PATTERN (insn))))
            live_insn = 1;
        }
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
        for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
          {
            rtx elt = XVECEXP (PATTERN (insn), 0, i);

            if (GET_CODE (elt) == SET)
              {
                if (GET_CODE (SET_DEST (elt)) == REG
                    && SET_DEST (elt) == SET_SRC (elt))
                  ;

#ifdef HAVE_cc0
                else if (GET_CODE (SET_DEST (elt)) == CC0
                         && ! side_effects_p (SET_SRC (elt))
                         && ((tem = next_nonnote_insn (insn)) == 0
                             || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
                             || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
                  ;
#endif
                else if (GET_CODE (SET_DEST (elt)) != REG
                         || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
                         || counts[REGNO (SET_DEST (elt))] != 0
                         || side_effects_p (SET_SRC (elt)))
                  live_insn = 1;
              }
            else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
              live_insn = 1;
          }
      else
        live_insn = 1;

      /* If this is a dead insn, delete it and show registers in it aren't
         being used.  */

      if (! live_insn)
        {
          count_reg_usage (insn, counts, -1);
          delete_insn (insn);
        }

      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
        in_libcall = 0;
    }
}
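/* Two examples of insns this pass deletes (pseudo-register numbers
   invented):

       (set (reg 65) (reg 65))                          a self-copy
       (set (reg 80) (plus (reg 60) (const_int 4)))     counts[80] == 0

   the second only when reg 80 is a pseudo that is never used afterward
   and the source has no side effects.  */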