/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"

#include <stdio.h>
#include <setjmp.h>

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions
   with the quantity's mode (`qty_mode') must be in the hash table for
   both registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

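   A purely illustrative example (the register and quantity numbers here
   are hypothetical): suppose max_reg is 100, so that at the start of a
   basic block reg_qty[N] == N for every N.  After

       (set (reg:SI 57) (plus:SI (reg:SI 55) (const_int 4)))

   reg 57 gets a fresh quantity, say reg_qty[57] == 103.  A later copy

       (set (reg:SI 58) (reg:SI 57))

   merely propagates the quantity, making reg_qty[58] == 103, and regs
   57 and 58 become chained together through reg_next_eqv/reg_prev_eqv.
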
   Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the
   configuration macro CONST_COSTS and will often depend on the constant
   value.  In any event, expressions containing constants can be
   simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match makes the entries be ignored if anyone tries to match them.

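   To illustrate (register number hypothetical): once an expression such
   as (plus:SI (reg:SI 66) (const_int 1)) has been entered, we have
   reg_in_table[66] == reg_tick[66].  A store into reg 66 increments
   reg_tick[66]; the stale entry stays in the table but can no longer be
   matched.  Only when a new expression mentioning reg 66 is about to be
   entered do we actually scan for and remove the stale references (see
   mention_regs and remove_invalid_refs below).
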
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */

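/* An illustrative sketch of related values: entering
   (const (plus (symbol_ref "x") (const_int 8))) in the table also enters
   (symbol_ref "x"), and the two elements are linked through their
   `related_value' fields.  If some register is later known to hold the
   value of `x', use_related_value can rewrite a use of x+8 as that
   register plus 8.  */
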
/* One plus largest register number used in this function.  */

static int max_reg;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) (pseudo) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

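/* An illustrative example of the above (quantity number hypothetical):
   after a conditional branch testing (gt (reg:SI 57) (const_int 0)) is
   known to be taken, record_jump_equiv can record, for reg 57's quantity
   Q, qty_comparison_code[Q] = GT and qty_comparison_const[Q] = const0_rtx,
   so that a later identical test can be treated as having a known result.  */
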
#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by (pseudo) register number, gives the quantity number
   of the register's current contents.  */

static int *reg_qty;

/* Indexed by (pseudo) register number, gives the number of the next (or
   previous) (pseudo) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by (pseudo) register number, gives the number of times
   that register has been altered in the current basic block.  */

static int *reg_tick;

/* Indexed by (pseudo) register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table.  */

static int *reg_in_table;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* Two vectors of ints:
   one with max_reg elements all equal to -1; the other with
   max_reg + 500 (an approximation of max_qty) elements, where
   element i contains i.
   These are used to initialize various other vectors fast.  */

static int *all_minus_one;
static int *consec_ints;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

#define HASHBITS 16

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M) \
  (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
   ? ((((int) REG << 7) + reg_qty[REGNO (X)]) % NBUCKETS) \
   : canon_hash (X, M) % NBUCKETS)

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N) \
  (((N) == FRAME_POINTER_REGNUM || fixed_regs[N]) \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N) \
  ((N) == FRAME_POINTER_REGNUM || fixed_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of 1 and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REG(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == STACK_POINTER_REGNUM \
   || (N) == ARG_POINTER_REGNUM \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || (FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) \
  (GET_CODE (X) == REG \
   ? (CHEAP_REG (REGNO (X)) ? 0 \
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
      : 2) \
   : rtx_cost (X, SET) * 2)

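/* A worked example of the cost scale (illustrative): COST gives a pseudo
   register 1 and a non-fixed hard register 2, while rtx_cost below gives
   a typical operation the default cost 2 and a multiplication
   COSTS_N_INSNS (5), i.e. 5*4-2 == 18, plus the costs of its operands.  */
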
/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))

static struct table_elt *table[NBUCKETS];

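/* Note (illustrative): immediately after new_basic_block, reg_qty[N] == N
   for every register N, so REGNO_QTY_VALID_P is false for all registers
   until make_new_qty or make_regs_eqv assigns a real quantity number
   (which is always >= max_reg).  */
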
/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Bits describing what kind of values in memory must be invalidated
   for a particular instruction.  If all the bits are zero,
   no memory refs need to be invalidated.  Each bit is more powerful
   than the preceding ones, and if a bit is set then the preceding
   bits are also set.

   Here is how the bits are set:
   Pushing onto the stack invalidates only the stack pointer,
   writing at a fixed address invalidates only variable addresses,
   writing in a structure element at variable address
     invalidates all but scalar variables,
   and writing in anything else at variable address invalidates everything.  */

struct write_data
{
  int sp : 1;			/* Invalidate stack pointer.  */
  int var : 1;			/* Invalidate variable addresses.  */
  int nonscalar : 1;		/* Invalidate all but scalar variables.  */
  int all : 1;			/* Invalidate all memory refs.  */
};

/* Define maximum length of a branch path.  */

#define PATHLENGTH 10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == arg_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == arg_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)))

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == arg_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || (X) == stack_pointer_rtx \
   || (X) == virtual_stack_dynamic_rtx \
   || (X) == virtual_outgoing_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx \
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)))

static void new_basic_block PROTO((void));
static void make_new_qty PROTO((int));
static void make_regs_eqv PROTO((int, int));
static void delete_reg_equiv PROTO((int));
static int mention_regs PROTO((rtx));
static int insert_regs PROTO((rtx, struct table_elt *, int));
static void free_element PROTO((struct table_elt *));
static void remove_from_table PROTO((struct table_elt *, int));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup PROTO((rtx, int, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, int, enum machine_mode));
static rtx lookup_as_function PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, int,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate PROTO((rtx));
static void remove_invalid_refs PROTO((int));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((struct write_data *));
static void invalidate_for_call PROTO((void));
static rtx use_related_value PROTO((rtx, struct table_elt *));
static int canon_hash PROTO((rtx, enum machine_mode));
static int safe_hash PROTO((rtx, enum machine_mode));
static int exp_equiv_p PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p PROTO((rtx, rtx));
static int refers_to_mem_p PROTO((rtx, rtx, HOST_WIDE_INT,
				  HOST_WIDE_INT));
static int cse_rtx_addr_varies_p PROTO((rtx));
static rtx canon_reg PROTO((rtx, rtx));
static void find_best_addr PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary PROTO((enum rtx_code, enum machine_mode,
				 rtx, rtx));
static rtx simplify_plus_minus PROTO((enum rtx_code, enum machine_mode,
				      rtx, rtx));
static rtx fold_rtx PROTO((rtx, rtx));
static rtx equiv_constant PROTO((rtx));
static void record_jump_equiv PROTO((rtx, int));
static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
				    rtx, rtx, int));
static void cse_insn PROTO((rtx, int));
static void note_mem_written PROTO((rtx, struct write_data *));
static void invalidate_from_clobbers PROTO((struct write_data *, rtx));
static rtx cse_process_notes PROTO((rtx, rtx));
static void cse_around_loop PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop PROTO((rtx, rtx, rtx));
static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PROTO((rtx, int *, int));
\f
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)

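/* Under this formula COSTS_N_INSNS (1) == 2, which matches the default
   cost of 2 that rtx_cost below assigns to a simple operation, and
   COSTS_N_INSNS (7) == 26, the value used for division and modulus.  */
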
int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (REGNO (x));

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
      CONST_COSTS (x, code, outer_code);
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
\f
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero (reg_tick, max_reg * sizeof (int));

  bcopy (all_minus_one, reg_in_table, max_reg * sizeof (int));
  bcopy (consec_ints, reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero (table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[regno_last_uid[new]] > cse_basic_block_end
		       || (uid_cuid[regno_first_uid[new]]
			   < cse_basic_block_start))
		      && (uid_cuid[regno_last_uid[new]]
			  > uid_cuid[regno_last_uid[firstr]]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int n = reg_next_eqv[reg];
  register int p = reg_prev_eqv[reg];
  register int q = reg_qty[reg];

  /* If invalid, do nothing.  N and P above are undefined in that case.  */
  if (q == reg)
    return;

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      if (modified
	  || ! (REGNO_QTY_VALID_P (regno)
		&& qty_mode[reg_qty[regno]] == GET_MODE (x)))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[reg_qty[regno]] = GET_MODE (x);
	  return 1;
	}

      /* X already has a valid quantity of the right mode,
	 so nothing about reg_qty has changed.  */
      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      mention_regs (SUBREG_REG (x));
      return 1;
    }
  else
    return mention_regs (x);
}
\f
/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     int hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     int hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     int hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     int hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }


  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to a constant,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]])
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const)
	    {
	      qty_const[reg_qty[REGNO (x)]]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      int subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
\f
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    hash = HASH (exp, mode);
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
\f
/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */

static void
invalidate (x)
     rtx x;
{
  register int i;
  register struct table_elt *p;
  rtx base;
  HOST_WIDE_INT start, end;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register int hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
	 that its value might have changed.  If it is a pseudo, remove its
	 entry from the hash table.

	 For a hard register, we do the first two actions above for any
	 additional hard registers corresponding to X.  Then, if any of these
	 registers are in the table, we must remove any REG entries that
	 overlap these registers.  */

      delete_reg_equiv (regno);
      reg_tick[regno]++;

      if (regno >= FIRST_PSEUDO_REGISTER)
	remove_from_table (lookup_for_remove (x, hash, GET_MODE (x)), hash);
      else
	{
	  int in_table = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  int tregno, tendregno;
	  register struct table_elt *p, *next;

	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

	  for (i = regno + 1; i < endregno; i++)
	    {
	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
	      delete_reg_equiv (i);
	      reg_tick[i]++;
	    }

	  if (in_table)
	    for (hash = 0; hash < NBUCKETS; hash++)
	      for (p = table[hash]; p; p = next)
		{
		  next = p->next_same_hash;

		  if (GET_CODE (p->exp) != REG
		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		    continue;

		  tregno = REGNO (p->exp);
		  tendregno
		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
		  if (tendregno > regno && tregno < endregno)
		    remove_from_table (p, hash);
		}
	}

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
	abort ();
      invalidate (SUBREG_REG (x));
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  set_nonvarying_address_components (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x)),
				     &base, &start, &end);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
	{
	  next = p->next_same_hash;
	  if (refers_to_mem_p (p->exp, base, start, end))
	    remove_from_table (p, i);
	}
    }
}

/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (regno)
     int regno;
{
  register int i;
  register struct table_elt *p, *next;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG
	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
	  remove_from_table (p, i);
      }
}
\f
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (x)
     rtx x;
{
  int i;
  struct table_elt *p, *next;
  int hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (GET_CODE (x) != REG
      || reg_in_table[REGNO (x)] < 0
      || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  We can skip
     objects that are registers, since they are handled specially.  */

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, 0)
	    && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
	  {
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
\f
/* Remove from the hash table all expressions that reference memory,
   or some of them as specified by *WRITES.  */

static void
invalidate_memory (writes)
     struct write_data *writes;
{
  register int i;
  register struct table_elt *p, *next;
  int all = writes->all;
  int nonscalar = writes->nonscalar;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
	next = p->next_same_hash;
	if (p->in_memory
	    && (all
		|| (nonscalar && p->in_struct)
		|| cse_rtx_addr_varies_p (p->exp)))
	  remove_from_table (p, i);
      }
}
\f
1658 /* Remove from the hash table any expressions that are call-clobbered
1659 registers. Also update their reg_tick values. */
1660
1661 static void
1662 invalidate_for_call ()
1663 {
1664 int regno, endregno;
1665 int i;
1666 int hash;
1667 struct table_elt *p, *next;
1668 int in_table = 0;
1669
1670 /* Go through all the hard registers. For each that is clobbered in
1671 a CALL_INSN, remove the register from quantity chains and update
1672 reg_tick if defined. Also see if any of these registers is currently
1673 in the table. */
1674
1675 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1676 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1677 {
1678 delete_reg_equiv (regno);
1679 if (reg_tick[regno] >= 0)
1680 reg_tick[regno]++;
1681
1682 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1683 }
1684
1685 /* In the case where we have no call-clobbered hard registers in the
1686 table, we are done. Otherwise, scan the table and remove any
1687 entry that overlaps a call-clobbered register. */
1688
1689 if (in_table)
1690 for (hash = 0; hash < NBUCKETS; hash++)
1691 for (p = table[hash]; p; p = next)
1692 {
1693 next = p->next_same_hash;
1694
1695 if (GET_CODE (p->exp) != REG
1696 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1697 continue;
1698
1699 regno = REGNO (p->exp);
1700 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1701
1702 for (i = regno; i < endregno; i++)
1703 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1704 {
1705 remove_from_table (p, hash);
1706 break;
1707 }
1708 }
1709 }
1710 \f
1711 /* Given an expression X of type CONST,
1712 and ELT which is its table entry (or 0 if it
1713 is not in the hash table),
1714 return an alternate expression for X as a register plus integer.
1715 If none can be found, return 0. */
1716
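/* A hypothetical example (names and register numbers made up for
   illustration): if X is (const (plus (symbol_ref "tbl") (const_int 8)))
   and the table knows that (reg 70) holds
   (const (plus (symbol_ref "tbl") (const_int 4))), the two CONSTs are
   related values; OFFSET is 8 - 4 = 4, and the result is
   (plus (reg 70) (const_int 4)), turning a constant load into cheap
   register arithmetic.  */
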
1717 static rtx
1718 use_related_value (x, elt)
1719 rtx x;
1720 struct table_elt *elt;
1721 {
1722 register struct table_elt *relt = 0;
1723 register struct table_elt *p, *q;
1724 HOST_WIDE_INT offset;
1725
1726 /* First, is there anything related known?
1727 If we have a table element, we can tell from that.
1728 Otherwise, must look it up. */
1729
1730 if (elt != 0 && elt->related_value != 0)
1731 relt = elt;
1732 else if (elt == 0 && GET_CODE (x) == CONST)
1733 {
1734 rtx subexp = get_related_value (x);
1735 if (subexp != 0)
1736 relt = lookup (subexp,
1737 safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1738 GET_MODE (subexp));
1739 }
1740
1741 if (relt == 0)
1742 return 0;
1743
1744 /* Search all related table entries for one that has an
1745 equivalent register. */
1746
1747 p = relt;
1748 while (1)
1749 {
1750 /* This loop is strange in that it is executed in two different cases.
1751 The first is when X is already in the table. Then it is searching
1752 the RELATED_VALUE list of X's class (RELT). The second case is when
1753 X is not in the table. Then RELT points to a class for the related
1754 value.
1755
1756 Ensure that, whatever case we are in, we ignore classes that have
1757 the same value as X. */
1758
1759 if (rtx_equal_p (x, p->exp))
1760 q = 0;
1761 else
1762 for (q = p->first_same_value; q; q = q->next_same_value)
1763 if (GET_CODE (q->exp) == REG)
1764 break;
1765
1766 if (q)
1767 break;
1768
1769 p = p->related_value;
1770
1771 /* We went all the way around, so there is nothing to be found.
1772 Alternatively, perhaps RELT was in the table for some other reason
1773 and it has no related values recorded. */
1774 if (p == relt || p == 0)
1775 break;
1776 }
1777
1778 if (q == 0)
1779 return 0;
1780
1781 offset = (get_integer_term (x) - get_integer_term (p->exp));
1782 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
1783 return plus_constant (q->exp, offset);
1784 }
1785 \f
1786 /* Hash an rtx. We are careful to make sure the value is never negative.
1787 Equivalent registers hash identically.
1788 MODE is used in hashing for CONST_INTs only;
1789 otherwise the mode of X is used.
1790
1791 Store 1 in do_not_record if any subexpression is volatile.
1792
1793 Store 1 in hash_arg_in_memory if X contains a MEM rtx
1794 which does not have the RTX_UNCHANGING_P bit set.
1795 In this case, also store 1 in hash_arg_in_struct
1796 if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1797
1798 Note that cse_insn knows that the hash code of a MEM expression
1799 is just (int) MEM plus the hash code of the address. */
1800
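#if 0
/* Illustrative sketch only (not part of the pass): the folding step
   canon_hash applies to each integer it mixes in below.  Adding the
   high bits back into the low bits keeps the running hash within
   HASHBITS bits, and hence nonnegative.  */
static int
hash_mix (hash, tem)
     int hash, tem;
{
  return hash + (((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS)));
}
#endif
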
1801 static int
1802 canon_hash (x, mode)
1803 rtx x;
1804 enum machine_mode mode;
1805 {
1806 register int i, j;
1807 register int hash = 0;
1808 register enum rtx_code code;
1809 register char *fmt;
1810
1811 /* repeat is used to turn tail-recursion into iteration. */
1812 repeat:
1813 if (x == 0)
1814 return hash;
1815
1816 code = GET_CODE (x);
1817 switch (code)
1818 {
1819 case REG:
1820 {
1821 register int regno = REGNO (x);
1822
1823 /* On some machines, we can't record any non-fixed hard register,
1824 because extending its life will cause reload problems. We
1825 consider ap, fp, and sp to be fixed for this purpose.
1826 On all machines, we can't record any global registers. */
1827
1828 if (regno < FIRST_PSEUDO_REGISTER
1829 && (global_regs[regno]
1830 #ifdef SMALL_REGISTER_CLASSES
1831 || (! fixed_regs[regno]
1832 && regno != FRAME_POINTER_REGNUM
1833 && regno != ARG_POINTER_REGNUM
1834 && regno != STACK_POINTER_REGNUM)
1835 #endif
1836 ))
1837 {
1838 do_not_record = 1;
1839 return 0;
1840 }
1841 return hash + ((int) REG << 7) + reg_qty[regno];
1842 }
1843
1844 case CONST_INT:
1845 hash += ((int) mode + ((int) CONST_INT << 7)
1846 + INTVAL (x) + (INTVAL (x) >> HASHBITS));
1847 return ((1 << HASHBITS) - 1) & hash;
1848
1849 case CONST_DOUBLE:
1850 /* This is like the general case, except that it only counts
1851 the integers representing the constant. */
1852 hash += (int) code + (int) GET_MODE (x);
1853 {
1854 int i;
1855 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1856 {
1857 int tem = XINT (x, i);
1858 hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
1859 }
1860 }
1861 return hash;
1862
1863 /* Assume there is only one rtx object for any given label. */
1864 case LABEL_REF:
1865 /* Use `and' to ensure a positive number. */
1866 return (hash + ((HOST_WIDE_INT) LABEL_REF << 7)
1867 + ((HOST_WIDE_INT) XEXP (x, 0) & ((1 << HASHBITS) - 1)));
1868
1869 case SYMBOL_REF:
1870 return (hash + ((HOST_WIDE_INT) SYMBOL_REF << 7)
1871 + ((HOST_WIDE_INT) XEXP (x, 0) & ((1 << HASHBITS) - 1)));
1872
1873 case MEM:
1874 if (MEM_VOLATILE_P (x))
1875 {
1876 do_not_record = 1;
1877 return 0;
1878 }
1879 if (! RTX_UNCHANGING_P (x))
1880 {
1881 hash_arg_in_memory = 1;
1882 if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1883 }
1884 /* Now that we have already found this special case,
1885 might as well speed it up as much as possible. */
1886 hash += (int) MEM;
1887 x = XEXP (x, 0);
1888 goto repeat;
1889
1890 case PRE_DEC:
1891 case PRE_INC:
1892 case POST_DEC:
1893 case POST_INC:
1894 case PC:
1895 case CC0:
1896 case CALL:
1897 case UNSPEC_VOLATILE:
1898 do_not_record = 1;
1899 return 0;
1900
1901 case ASM_OPERANDS:
1902 if (MEM_VOLATILE_P (x))
1903 {
1904 do_not_record = 1;
1905 return 0;
1906 }
1907 }
1908
1909 i = GET_RTX_LENGTH (code) - 1;
1910 hash += (int) code + (int) GET_MODE (x);
1911 fmt = GET_RTX_FORMAT (code);
1912 for (; i >= 0; i--)
1913 {
1914 if (fmt[i] == 'e')
1915 {
1916 rtx tem = XEXP (x, i);
1917 rtx tem1;
1918
1919 /* If the operand is a REG that is equivalent to a constant, hash
1920 as if we were hashing the constant, since we will be comparing
1921 that way. */
1922 if (tem != 0 && GET_CODE (tem) == REG
1923 && REGNO_QTY_VALID_P (REGNO (tem))
1924 && qty_mode[reg_qty[REGNO (tem)]] == GET_MODE (tem)
1925 && (tem1 = qty_const[reg_qty[REGNO (tem)]]) != 0
1926 && CONSTANT_P (tem1))
1927 tem = tem1;
1928
1929 /* If we are about to do the last recursive call
1930 needed at this level, change it into iteration.
1931 This function is called enough to be worth it. */
1932 if (i == 0)
1933 {
1934 x = tem;
1935 goto repeat;
1936 }
1937 hash += canon_hash (tem, 0);
1938 }
1939 else if (fmt[i] == 'E')
1940 for (j = 0; j < XVECLEN (x, i); j++)
1941 hash += canon_hash (XVECEXP (x, i, j), 0);
1942 else if (fmt[i] == 's')
1943 {
1944 register char *p = XSTR (x, i);
1945 if (p)
1946 while (*p)
1947 {
1948 register int tem = *p++;
1949 hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
1950 }
1951 }
1952 else if (fmt[i] == 'i')
1953 {
1954 register int tem = XINT (x, i);
1955 hash += ((1 << HASHBITS) - 1) & (tem + (tem >> HASHBITS));
1956 }
1957 else
1958 abort ();
1959 }
1960 return hash;
1961 }
1962
1963 /* Like canon_hash but with no side effects. */
1964
1965 static int
1966 safe_hash (x, mode)
1967 rtx x;
1968 enum machine_mode mode;
1969 {
1970 int save_do_not_record = do_not_record;
1971 int save_hash_arg_in_memory = hash_arg_in_memory;
1972 int save_hash_arg_in_struct = hash_arg_in_struct;
1973 int hash = canon_hash (x, mode);
1974 hash_arg_in_memory = save_hash_arg_in_memory;
1975 hash_arg_in_struct = save_hash_arg_in_struct;
1976 do_not_record = save_do_not_record;
1977 return hash;
1978 }
1979 \f
1980 /* Return 1 iff X and Y would canonicalize into the same thing,
1981 without actually constructing the canonicalization of either one.
1982 If VALIDATE is nonzero,
1983 we assume X is an expression being processed from the rtl
1984 and Y was found in the hash table. We check register refs
1985 in Y for being marked as valid.
1986
1987 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
1988 that is known to be in the register. Ordinarily, we don't allow them
1989 to match, because letting them match would cause unpredictable results
1990 in all the places that search a hash table chain for an equivalent
1991 for a given value. A possible equivalent that has different structure
1992 has its hash code computed from different data. Whether the hash code
1993 is the same as that of the given value is pure luck. */
1994
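/* Note the idiom used throughout this file: exp_equiv_p (x, x, 1, 0)
   compares an expression with itself with VALIDATE nonzero, which
   amounts to checking that every register mentioned in X is still
   marked valid in the table.  */
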
1995 static int
1996 exp_equiv_p (x, y, validate, equal_values)
1997 rtx x, y;
1998 int validate;
1999 int equal_values;
2000 {
2001 register int i, j;
2002 register enum rtx_code code;
2003 register char *fmt;
2004
2005 /* Note: it is incorrect to assume an expression is equivalent to itself
2006 if VALIDATE is nonzero. */
2007 if (x == y && !validate)
2008 return 1;
2009 if (x == 0 || y == 0)
2010 return x == y;
2011
2012 code = GET_CODE (x);
2013 if (code != GET_CODE (y))
2014 {
2015 if (!equal_values)
2016 return 0;
2017
2018 /* If X is a constant and Y is a register or vice versa, they may be
2019 equivalent. We only have to validate if Y is a register. */
2020 if (CONSTANT_P (x) && GET_CODE (y) == REG
2021 && REGNO_QTY_VALID_P (REGNO (y))
2022 && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2023 && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2024 && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2025 return 1;
2026
2027 if (CONSTANT_P (y) && code == REG
2028 && REGNO_QTY_VALID_P (REGNO (x))
2029 && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2030 && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2031 return 1;
2032
2033 return 0;
2034 }
2035
2036 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2037 if (GET_MODE (x) != GET_MODE (y))
2038 return 0;
2039
2040 switch (code)
2041 {
2042 case PC:
2043 case CC0:
2044 return x == y;
2045
2046 case CONST_INT:
2047 return INTVAL (x) == INTVAL (y);
2048
2049 case LABEL_REF:
2050 case SYMBOL_REF:
2051 return XEXP (x, 0) == XEXP (y, 0);
2052
2053 case REG:
2054 {
2055 int regno = REGNO (y);
2056 int endregno
2057 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2058 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2059 int i;
2060
2061 /* If the quantities are not the same, the expressions are not
2062 equivalent. If they are and we are not to validate, they
2063 are equivalent. Otherwise, ensure all regs are up-to-date. */
2064
2065 if (reg_qty[REGNO (x)] != reg_qty[regno])
2066 return 0;
2067
2068 if (! validate)
2069 return 1;
2070
2071 for (i = regno; i < endregno; i++)
2072 if (reg_in_table[i] != reg_tick[i])
2073 return 0;
2074
2075 return 1;
2076 }
2077
2078 /* For commutative operations, check both orders. */
2079 case PLUS:
2080 case MULT:
2081 case AND:
2082 case IOR:
2083 case XOR:
2084 case NE:
2085 case EQ:
2086 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2087 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2088 validate, equal_values))
2089 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2090 validate, equal_values)
2091 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2092 validate, equal_values)));
2093 }
2094
2095 /* Compare the elements. If any pair of corresponding elements
2096 fails to match, return 0 for the whole thing. */
2097
2098 fmt = GET_RTX_FORMAT (code);
2099 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2100 {
2101 switch (fmt[i])
2102 {
2103 case 'e':
2104 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2105 return 0;
2106 break;
2107
2108 case 'E':
2109 if (XVECLEN (x, i) != XVECLEN (y, i))
2110 return 0;
2111 for (j = 0; j < XVECLEN (x, i); j++)
2112 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2113 validate, equal_values))
2114 return 0;
2115 break;
2116
2117 case 's':
2118 if (strcmp (XSTR (x, i), XSTR (y, i)))
2119 return 0;
2120 break;
2121
2122 case 'i':
2123 if (XINT (x, i) != XINT (y, i))
2124 return 0;
2125 break;
2126
2127 case 'w':
2128 if (XWINT (x, i) != XWINT (y, i))
2129 return 0;
2130 break;
2131
2132 case '0':
2133 break;
2134
2135 default:
2136 abort ();
2137 }
2138 }
2139
2140 return 1;
2141 }
2142 \f
2143 /* Return 1 iff any subexpression of X matches Y.
2144 Here we do not require that the register references in X or Y be
2145 up to date, as the hash table lookups do. */
2146
2147 static int
2148 refers_to_p (x, y)
2149 rtx x, y;
2150 {
2151 register int i;
2152 register enum rtx_code code;
2153 register char *fmt;
2154
2155 repeat:
2156 if (x == y)
2157 return 1;
2158 if (x == 0 || y == 0)
2159 return 0;
2160
2161 code = GET_CODE (x);
2162 /* If X as a whole has the same code as Y, they may match.
2163 If so, return 1. */
2164 if (code == GET_CODE (y))
2165 {
2166 if (exp_equiv_p (x, y, 0, 1))
2167 return 1;
2168 }
2169
2170 /* X does not match, so try its subexpressions. */
2171
2172 fmt = GET_RTX_FORMAT (code);
2173 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2174 if (fmt[i] == 'e')
2175 {
2176 if (i == 0)
2177 {
2178 x = XEXP (x, 0);
2179 goto repeat;
2180 }
2181 else
2182 if (refers_to_p (XEXP (x, i), y))
2183 return 1;
2184 }
2185 else if (fmt[i] == 'E')
2186 {
2187 int j;
2188 for (j = 0; j < XVECLEN (x, i); j++)
2189 if (refers_to_p (XVECEXP (x, i, j), y))
2190 return 1;
2191 }
2192
2193 return 0;
2194 }
2195 \f
2196 /* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2197 set PBASE, PSTART, and PEND which correspond to the base of the address,
2198 the starting offset, and ending offset respectively.
2199
2200 ADDR is known to be a nonvarying address.
2201
2202 cse_address_varies_p returns zero for nonvarying addresses. */
2203
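/* A hypothetical example (register number made up): if ADDR is
   (plus (reg 61) (const_int 8)), SIZE is 4, and reg 61 is known to be
   equivalent to (symbol_ref "buf"), the results are
   *PBASE = (symbol_ref "buf"), *PSTART = 8, and *PEND = 12.  */
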
2204 static void
2205 set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2206 rtx addr;
2207 int size;
2208 rtx *pbase;
2209 HOST_WIDE_INT *pstart, *pend;
2210 {
2211 rtx base;
2212 HOST_WIDE_INT start, end;
2213
2214 base = addr;
2215 start = 0;
2216 end = 0;
2217
2218 /* Registers with nonvarying addresses usually have constant equivalents;
2219 but the frame pointer register is also possible. */
2220 if (GET_CODE (base) == REG
2221 && qty_const != 0
2222 && REGNO_QTY_VALID_P (REGNO (base))
2223 && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2224 && qty_const[reg_qty[REGNO (base)]] != 0)
2225 base = qty_const[reg_qty[REGNO (base)]];
2226 else if (GET_CODE (base) == PLUS
2227 && GET_CODE (XEXP (base, 1)) == CONST_INT
2228 && GET_CODE (XEXP (base, 0)) == REG
2229 && qty_const != 0
2230 && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2231 && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2232 == GET_MODE (XEXP (base, 0)))
2233 && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2234 {
2235 start = INTVAL (XEXP (base, 1));
2236 base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2237 }
2238
2239 /* By definition, operand1 of a LO_SUM is the associated constant
2240 address. Use the associated constant address as the base instead. */
2241 if (GET_CODE (base) == LO_SUM)
2242 base = XEXP (base, 1);
2243
2244 /* Strip off CONST. */
2245 if (GET_CODE (base) == CONST)
2246 base = XEXP (base, 0);
2247
2248 if (GET_CODE (base) == PLUS
2249 && GET_CODE (XEXP (base, 1)) == CONST_INT)
2250 {
2251 start += INTVAL (XEXP (base, 1));
2252 base = XEXP (base, 0);
2253 }
2254
2255 end = start + size;
2256
2257 /* Set the return values. */
2258 *pbase = base;
2259 *pstart = start;
2260 *pend = end;
2261 }
2262
2263 /* Return 1 iff any subexpression of X refers to memory
2264 at an address of BASE plus some offset
2265 such that any of the bytes' offsets fall between START (inclusive)
2266 and END (exclusive).
2267
2268 The value is undefined if X is a varying address (as determined by
2269 cse_rtx_addr_varies_p). This function is not used in such cases.
2270
2271 When used in the cse pass, `qty_const' is nonzero, and it is used
2272 to treat an address that is a register with a known constant value
2273 as if it were that constant value.
2274 In the loop pass, `qty_const' is zero, so this is not done. */
2275
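#if 0
/* Illustrative sketch only: the conflict test at the heart of
   refers_to_mem_p.  Two byte ranges [START1, END1) and [START2, END2)
   off the same base overlap exactly when each range begins before the
   other one ends.  */
static int
mem_ranges_overlap_p (start1, end1, start2, end2)
     HOST_WIDE_INT start1, end1, start2, end2;
{
  return end1 > start2 && start1 < end2;
}
#endif
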
2276 static int
2277 refers_to_mem_p (x, base, start, end)
2278 rtx x, base;
2279 HOST_WIDE_INT start, end;
2280 {
2281 register HOST_WIDE_INT i;
2282 register enum rtx_code code;
2283 register char *fmt;
2284
2285 if (GET_CODE (base) == CONST_INT)
2286 {
2287 start += INTVAL (base);
2288 end += INTVAL (base);
2289 base = const0_rtx;
2290 }
2291
2292 repeat:
2293 if (x == 0)
2294 return 0;
2295
2296 code = GET_CODE (x);
2297 if (code == MEM)
2298 {
2299 register rtx addr = XEXP (x, 0); /* Get the address. */
2300 rtx mybase;
2301 HOST_WIDE_INT mystart, myend;
2302
2303 set_nonvarying_address_components (addr, GET_MODE_SIZE (GET_MODE (x)),
2304 &mybase, &mystart, &myend);
2305
2306
2307 /* refers_to_mem_p is never called with varying addresses.
2308 If the base addresses are not equal, there is no chance
2309 of the memory addresses conflicting. */
2310 if (! rtx_equal_p (mybase, base))
2311 return 0;
2312
2313 return myend > start && mystart < end;
2314 }
2315
2316 /* X does not match, so try its subexpressions. */
2317
2318 fmt = GET_RTX_FORMAT (code);
2319 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2320 if (fmt[i] == 'e')
2321 {
2322 if (i == 0)
2323 {
2324 x = XEXP (x, 0);
2325 goto repeat;
2326 }
2327 else
2328 if (refers_to_mem_p (XEXP (x, i), base, start, end))
2329 return 1;
2330 }
2331 else if (fmt[i] == 'E')
2332 {
2333 int j;
2334 for (j = 0; j < XVECLEN (x, i); j++)
2335 if (refers_to_mem_p (XVECEXP (x, i, j), base, start, end))
2336 return 1;
2337 }
2338
2339 return 0;
2340 }
2341
2342 /* Nonzero if X refers to memory at a varying address;
2343 except that a register which has at the moment a known constant value
2344 isn't considered variable. */
2345
2346 static int
2347 cse_rtx_addr_varies_p (x)
2348 rtx x;
2349 {
2350 /* We need not check for X and the equivalence class being of the same
2351 mode because if X is equivalent to a constant in some mode, it
2352 doesn't vary in any mode. */
2353
2354 if (GET_CODE (x) == MEM
2355 && GET_CODE (XEXP (x, 0)) == REG
2356 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2357 && GET_MODE (XEXP (x, 0)) == qty_mode[reg_qty[REGNO (XEXP (x, 0))]]
2358 && qty_const[reg_qty[REGNO (XEXP (x, 0))]] != 0)
2359 return 0;
2360
2361 if (GET_CODE (x) == MEM
2362 && GET_CODE (XEXP (x, 0)) == PLUS
2363 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2364 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2365 && REGNO_QTY_VALID_P (REGNO (XEXP (XEXP (x, 0), 0)))
2366 && (GET_MODE (XEXP (XEXP (x, 0), 0))
2367 == qty_mode[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2368 && qty_const[reg_qty[REGNO (XEXP (XEXP (x, 0), 0))]])
2369 return 0;
2370
2371 return rtx_addr_varies_p (x);
2372 }
2373 \f
2374 /* Canonicalize an expression:
2375 replace each register reference inside it
2376 with the "oldest" equivalent register.
2377
2378 If INSN is non-zero and we are replacing a pseudo with a hard register
2379 or vice versa, validate_change is used to ensure that INSN remains valid
2380 after we make our substitution. The calls are made with IN_GROUP non-zero
2381 so apply_change_group must be called upon the outermost return from this
2382 function (unless INSN is zero). The result of apply_change_group can
2383 generally be discarded since the changes we are making are optional. */
2384
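/* A hypothetical example (register numbers made up): if pseudos 70,
   71, and 72 all belong to one quantity and 70 is the oldest, this
   rewrites (plus (reg 72) (reg 71)) as (plus (reg 70) (reg 70)), so
   that structurally identical expressions hash and compare alike.  */
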
2385 static rtx
2386 canon_reg (x, insn)
2387 rtx x;
2388 rtx insn;
2389 {
2390 register int i;
2391 register enum rtx_code code;
2392 register char *fmt;
2393
2394 if (x == 0)
2395 return x;
2396
2397 code = GET_CODE (x);
2398 switch (code)
2399 {
2400 case PC:
2401 case CC0:
2402 case CONST:
2403 case CONST_INT:
2404 case CONST_DOUBLE:
2405 case SYMBOL_REF:
2406 case LABEL_REF:
2407 case ADDR_VEC:
2408 case ADDR_DIFF_VEC:
2409 return x;
2410
2411 case REG:
2412 {
2413 register int first;
2414
2415 /* Never replace a hard reg, because hard regs can appear
2416 in more than one machine mode, and we must preserve the mode
2417 of each occurrence. Also, some hard regs appear in
2418 MEMs that are shared and mustn't be altered. Don't try to
2419 replace any reg that maps to a reg of class NO_REGS. */
2420 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2421 || ! REGNO_QTY_VALID_P (REGNO (x)))
2422 return x;
2423
2424 first = qty_first_reg[reg_qty[REGNO (x)]];
2425 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2426 : REGNO_REG_CLASS (first) == NO_REGS ? x
2427 : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
2428 }
2429 }
2430
2431 fmt = GET_RTX_FORMAT (code);
2432 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2433 {
2434 register int j;
2435
2436 if (fmt[i] == 'e')
2437 {
2438 rtx new = canon_reg (XEXP (x, i), insn);
2439
2440 /* If replacing pseudo with hard reg or vice versa, ensure the
2441 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2442 if (insn != 0 && new != 0
2443 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2444 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2445 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2446 || insn_n_dups[recog_memoized (insn)] > 0))
2447 validate_change (insn, &XEXP (x, i), new, 1);
2448 else
2449 XEXP (x, i) = new;
2450 }
2451 else if (fmt[i] == 'E')
2452 for (j = 0; j < XVECLEN (x, i); j++)
2453 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2454 }
2455
2456 return x;
2457 }
2458 \f
2459 /* LOC is a location within INSN that is an operand address (the contents of
2460 a MEM). Find the best equivalent address to use that is valid for this
2461 insn.
2462
2463 On most CISC machines, complicated address modes are costly, and rtx_cost
2464 is a good approximation for that cost. However, most RISC machines have
2465 only a few (usually only one) memory reference formats. If an address is
2466 valid at all, it is often just as cheap as any other address. Hence, for
2467 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2468 costs of various addresses. For two addresses of equal cost, choose the one
2469 with the highest `rtx_cost' value as that has the potential of eliminating
2470 the most insns. For equal costs, we choose the first in the equivalence
2471 class. Note that we ignore the fact that pseudo registers are cheaper
2472 than hard registers here because we would also prefer the pseudo registers.
2473 */
2474
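#if 0
/* Illustrative sketch only: the ordering the ADDRESS_COST loops below
   implement.  A candidate beats the current best when its address cost
   is lower, or when the address costs tie and its rtx cost is higher,
   since the more complex equivalent can eliminate more insns.  */
static int
better_addr_p (new_addr_cost, new_rtx_cost, best_addr_cost, best_rtx_cost)
     int new_addr_cost, new_rtx_cost, best_addr_cost, best_rtx_cost;
{
  return (new_addr_cost < best_addr_cost
	  || (new_addr_cost == best_addr_cost
	      && new_rtx_cost > best_rtx_cost));
}
#endif
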
2475 static void
2476 find_best_addr (insn, loc)
2477 rtx insn;
2478 rtx *loc;
2479 {
2480 struct table_elt *elt, *p;
2481 rtx addr = *loc;
2482 int our_cost;
2483 int found_better = 1;
2484 int save_do_not_record = do_not_record;
2485 int save_hash_arg_in_memory = hash_arg_in_memory;
2486 int save_hash_arg_in_struct = hash_arg_in_struct;
2487 int hash_code;
2488 int addr_volatile;
2489 int regno;
2490
2491 /* Do not try to replace constant addresses or addresses of local and
2492 argument slots. These MEM expressions are made only once and inserted
2493 in many instructions, as well as being used to control symbol table
2494 output. It is not safe to clobber them.
2495
2496 There are some uncommon cases where the address is already in a register
2497 for some reason, but we cannot take advantage of that because we have
2498 no easy way to unshare the MEM. In addition, looking up all stack
2499 addresses is costly. */
2500 if ((GET_CODE (addr) == PLUS
2501 && GET_CODE (XEXP (addr, 0)) == REG
2502 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2503 && (regno = REGNO (XEXP (addr, 0)),
2504 regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
2505 || (GET_CODE (addr) == REG
2506 && (regno = REGNO (addr),
2507 regno == FRAME_POINTER_REGNUM || regno == ARG_POINTER_REGNUM))
2508 || CONSTANT_ADDRESS_P (addr))
2509 return;
2510
2511 /* If this address is not simply a register, try to fold it. This will
2512 sometimes simplify the expression. Many simplifications
2513 will not be valid, but some, usually applying the associative rule, will
2514 be valid and produce better code. */
2515 if (GET_CODE (addr) != REG
2516 && validate_change (insn, loc, fold_rtx (addr, insn), 0))
2517 addr = *loc;
2518
2519 /* If this address is not in the hash table, we can't look for equivalences
2520 of the whole address. Also, ignore if volatile. */
2521
2522 do_not_record = 0;
2523 hash_code = HASH (addr, Pmode);
2524 addr_volatile = do_not_record;
2525 do_not_record = save_do_not_record;
2526 hash_arg_in_memory = save_hash_arg_in_memory;
2527 hash_arg_in_struct = save_hash_arg_in_struct;
2528
2529 if (addr_volatile)
2530 return;
2531
2532 elt = lookup (addr, hash_code, Pmode);
2533
2534 #ifndef ADDRESS_COST
2535 if (elt)
2536 {
2537 our_cost = elt->cost;
2538
2539 /* Find the lowest cost below ours that works. */
2540 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2541 if (elt->cost < our_cost
2542 && (GET_CODE (elt->exp) == REG
2543 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2544 && validate_change (insn, loc,
2545 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2546 return;
2547 }
2548 #else
2549
2550 if (elt)
2551 {
2552 /* We need to find the best (under the criteria documented above) entry
2553 in the class that is valid. We use the `flag' field to indicate
2554 choices that were invalid and iterate until we can't find a better
2555 one that hasn't already been tried. */
2556
2557 for (p = elt->first_same_value; p; p = p->next_same_value)
2558 p->flag = 0;
2559
2560 while (found_better)
2561 {
2562 int best_addr_cost = ADDRESS_COST (*loc);
2563 int best_rtx_cost = (elt->cost + 1) >> 1;
2564 struct table_elt *best_elt = elt;
2565
2566 found_better = 0;
2567 for (p = elt->first_same_value; p; p = p->next_same_value)
2568 if (! p->flag
2569 && (GET_CODE (p->exp) == REG
2570 || exp_equiv_p (p->exp, p->exp, 1, 0))
2571 && (ADDRESS_COST (p->exp) < best_addr_cost
2572 || (ADDRESS_COST (p->exp) == best_addr_cost
2573 && (p->cost + 1) >> 1 > best_rtx_cost)))
2574 {
2575 found_better = 1;
2576 best_addr_cost = ADDRESS_COST (p->exp);
2577 best_rtx_cost = (p->cost + 1) >> 1;
2578 best_elt = p;
2579 }
2580
2581 if (found_better)
2582 {
2583 if (validate_change (insn, loc,
2584 canon_reg (copy_rtx (best_elt->exp),
2585 NULL_RTX), 0))
2586 return;
2587 else
2588 best_elt->flag = 1;
2589 }
2590 }
2591 }
2592
2593 /* If the address is a binary operation with the first operand a register
2594 and the second a constant, do the same as above, but looking for
2595 equivalences of the register. Then try to simplify before checking for
2596 the best address to use. This catches a few cases: First is when we
2597 have REG+const and the register is another REG+const. We can often merge
2598 the constants and eliminate one insn and one register. It may also be
2599 that a machine has a cheap REG+REG+const. Finally, this improves the
2600 code on the Alpha for unaligned byte stores. */
2601
2602 if (flag_expensive_optimizations
2603 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2604 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2605 && GET_CODE (XEXP (*loc, 0)) == REG
2606 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2607 {
2608 rtx c = XEXP (*loc, 1);
2609
2610 do_not_record = 0;
2611 hash_code = HASH (XEXP (*loc, 0), Pmode);
2612 do_not_record = save_do_not_record;
2613 hash_arg_in_memory = save_hash_arg_in_memory;
2614 hash_arg_in_struct = save_hash_arg_in_struct;
2615
2616 elt = lookup (XEXP (*loc, 0), hash_code, Pmode);
2617 if (elt == 0)
2618 return;
2619
2620 /* We need to find the best (under the criteria documented above) entry
2621 in the class that is valid. We use the `flag' field to indicate
2622 choices that were invalid and iterate until we can't find a better
2623 one that hasn't already been tried. */
2624
2625 for (p = elt->first_same_value; p; p = p->next_same_value)
2626 p->flag = 0;
2627
2628 while (found_better)
2629 {
2630 int best_addr_cost = ADDRESS_COST (*loc);
2631 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2632 struct table_elt *best_elt = elt;
2633 rtx best_rtx = *loc;
2634
2635 found_better = 0;
2636 for (p = elt->first_same_value; p; p = p->next_same_value)
2637 if (! p->flag
2638 && (GET_CODE (p->exp) == REG
2639 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2640 {
2641 rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2642
2643 if ((ADDRESS_COST (new) < best_addr_cost
2644 || (ADDRESS_COST (new) == best_addr_cost
2645 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2646 {
2647 found_better = 1;
2648 best_addr_cost = ADDRESS_COST (new);
2649 best_rtx_cost = (COST (new) + 1) >> 1;
2650 best_elt = p;
2651 best_rtx = new;
2652 }
2653 }
2654
2655 if (found_better)
2656 {
2657 if (validate_change (insn, loc,
2658 canon_reg (copy_rtx (best_rtx),
2659 NULL_RTX), 0))
2660 return;
2661 else
2662 best_elt->flag = 1;
2663 }
2664 }
2665 }
2666 #endif
2667 }
2668 \f
2669 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2670 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2671 find what values are being compared.
2672
2673 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2674 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2675 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2676 compared to produce cc0.
2677
2678 The return value is the comparison operator: the code of the comparison
2679 found, or the code corresponding to its inverse. */
2680
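/* A hypothetical example on a CC0 machine, mirroring the case described
   above (register numbers made up): if CODE is NE, *PARG1 is (cc0),
   *PARG2 is (const_int 0), and cc0 was last set from
   (compare (reg 65) (reg 66)), this returns NE with
   *PARG1 = (reg 65) and *PARG2 = (reg 66).  */
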
2681 static enum rtx_code
2682 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2683 enum rtx_code code;
2684 rtx *parg1, *parg2;
2685 enum machine_mode *pmode1, *pmode2;
2686 {
2687 rtx arg1, arg2;
2688
2689 arg1 = *parg1, arg2 = *parg2;
2690
2691 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2692
2693 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2694 {
2695 /* Set non-zero when we find something of interest. */
2696 rtx x = 0;
2697 int reverse_code = 0;
2698 struct table_elt *p = 0;
2699
2700 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2701 On machines with CC0, this is the only case that can occur, since
2702 fold_rtx will return the COMPARE or item being compared with zero
2703 when given CC0. */
2704
2705 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2706 x = arg1;
2707
2708 /* If ARG1 is a comparison operator and CODE is testing for
2709 STORE_FLAG_VALUE, get the inner arguments. */
2710
2711 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2712 {
2713 if (code == NE
2714 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2715 && code == LT && STORE_FLAG_VALUE == -1)
2716 #ifdef FLOAT_STORE_FLAG_VALUE
2717 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2718 && FLOAT_STORE_FLAG_VALUE < 0)
2719 #endif
2720 )
2721 x = arg1;
2722 else if (code == EQ
2723 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2724 && code == GE && STORE_FLAG_VALUE == -1)
2725 #ifdef FLOAT_STORE_FLAG_VALUE
2726 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2727 && FLOAT_STORE_FLAG_VALUE < 0)
2728 #endif
2729 )
2730 x = arg1, reverse_code = 1;
2731 }
2732
2733 /* ??? We could also check for
2734
2735 (ne (and (eq (...) (const_int 1))) (const_int 0))
2736
2737 and related forms, but let's wait until we see them occurring. */
2738
2739 if (x == 0)
2740 /* Look up ARG1 in the hash table and see if it has an equivalence
2741 that lets us see what is being compared. */
2742 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2743 GET_MODE (arg1));
2744 if (p) p = p->first_same_value;
2745
2746 for (; p; p = p->next_same_value)
2747 {
2748 enum machine_mode inner_mode = GET_MODE (p->exp);
2749
2750 /* If the entry isn't valid, skip it. */
2751 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2752 continue;
2753
2754 if (GET_CODE (p->exp) == COMPARE
2755 /* Another possibility is that this machine has a compare insn
2756 that includes the comparison code. In that case, ARG1 would
2757 be equivalent to a comparison operation that would set ARG1 to
2758 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2759 ORIG_CODE is the actual comparison being done; if it is an EQ,
2760 we must reverse ORIG_CODE. On machines with a negative value
2761 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2762 || ((code == NE
2763 || (code == LT
2764 && GET_MODE_CLASS (inner_mode) == MODE_INT
2765 && (GET_MODE_BITSIZE (inner_mode)
2766 <= HOST_BITS_PER_WIDE_INT)
2767 && (STORE_FLAG_VALUE
2768 & ((HOST_WIDE_INT) 1
2769 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2770 #ifdef FLOAT_STORE_FLAG_VALUE
2771 || (code == LT
2772 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2773 && FLOAT_STORE_FLAG_VALUE < 0)
2774 #endif
2775 )
2776 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2777 {
2778 x = p->exp;
2779 break;
2780 }
2781 else if ((code == EQ
2782 || (code == GE
2783 && GET_MODE_CLASS (inner_mode) == MODE_INT
2784 && (GET_MODE_BITSIZE (inner_mode)
2785 <= HOST_BITS_PER_WIDE_INT)
2786 && (STORE_FLAG_VALUE
2787 & ((HOST_WIDE_INT) 1
2788 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2789 #ifdef FLOAT_STORE_FLAG_VALUE
2790 || (code == GE
2791 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2792 && FLOAT_STORE_FLAG_VALUE < 0)
2793 #endif
2794 )
2795 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2796 {
2797 reverse_code = 1;
2798 x = p->exp;
2799 break;
2800 }
2801
2802 /* If this is fp + constant, the equivalent is a better operand since
2803 it may let us predict the value of the comparison. */
2804 else if (NONZERO_BASE_PLUS_P (p->exp))
2805 {
2806 arg1 = p->exp;
2807 continue;
2808 }
2809 }
2810
2811 /* If we didn't find a useful equivalence for ARG1, we are done.
2812 Otherwise, set up for the next iteration. */
2813 if (x == 0)
2814 break;
2815
2816 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
2817 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2818 code = GET_CODE (x);
2819
2820 if (reverse_code)
2821 code = reverse_condition (code);
2822 }
2823
2824 /* Return our results. Return the modes from before fold_rtx
2825 because fold_rtx might produce const_int, and then it's too late. */
2826 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2827 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2828
2829 return code;
2830 }
2831 \f
2832 /* Try to simplify a unary operation CODE whose output mode is to be
2833 MODE with input operand OP whose mode was originally OP_MODE.
2834 Return zero if no simplification can be made. */
2835
2836 rtx
2837 simplify_unary_operation (code, mode, op, op_mode)
2838 enum rtx_code code;
2839 enum machine_mode mode;
2840 rtx op;
2841 enum machine_mode op_mode;
2842 {
2843 register int width = GET_MODE_BITSIZE (mode);
2844
2845 /* The order of these tests is critical so that, for example, we don't
2846 check the wrong mode (input vs. output) for a conversion operation,
2847 such as FIX. At some point, this should be simplified. */
2848
2849 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2850 if (code == FLOAT && GET_CODE (op) == CONST_INT)
2851 {
2852 REAL_VALUE_TYPE d;
2853
2854 #ifdef REAL_ARITHMETIC
2855 REAL_VALUE_FROM_INT (d, INTVAL (op), INTVAL (op) < 0 ? ~0 : 0);
2856 #else
2857 d = (double) INTVAL (op);
2858 #endif
2859 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2860 }
2861 else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_INT)
2862 {
2863 REAL_VALUE_TYPE d;
2864
2865 #ifdef REAL_ARITHMETIC
2866 REAL_VALUE_FROM_INT (d, INTVAL (op), 0);
2867 #else
2868 d = (double) (unsigned int) INTVAL (op);
2869 #endif
2870 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2871 }
2872
2873 else if (code == FLOAT && GET_CODE (op) == CONST_DOUBLE
2874 && GET_MODE (op) == VOIDmode)
2875 {
2876 REAL_VALUE_TYPE d;
2877
2878 #ifdef REAL_ARITHMETIC
2879 REAL_VALUE_FROM_INT (d, CONST_DOUBLE_LOW (op), CONST_DOUBLE_HIGH (op));
2880 #else
2881 if (CONST_DOUBLE_HIGH (op) < 0)
2882 {
2883 d = (double) (~ CONST_DOUBLE_HIGH (op));
2884 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2885 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2886 d += (double) (unsigned HOST_WIDE_INT) (~ CONST_DOUBLE_LOW (op));
2887 d = (- d - 1.0);
2888 }
2889 else
2890 {
2891 d = (double) CONST_DOUBLE_HIGH (op);
2892 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2893 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2894 d += (double) (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
2895 }
2896 #endif /* REAL_ARITHMETIC */
2897 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2898 }
2899 else if (code == UNSIGNED_FLOAT && GET_CODE (op) == CONST_DOUBLE
2900 && GET_MODE (op) == VOIDmode)
2901 {
2902 REAL_VALUE_TYPE d;
2903
2904 #ifdef REAL_ARITHMETIC
2905 REAL_VALUE_FROM_UNSIGNED_INT (d, CONST_DOUBLE_LOW (op),
2906 CONST_DOUBLE_HIGH (op));
2907 #else
2908 d = (double) CONST_DOUBLE_HIGH (op);
2909 d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
2910 * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
2911 d += (double) (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
2912 #endif /* REAL_ARITHMETIC */
2913 return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
2914 }
2915 #endif
2916
2917 if (GET_CODE (op) == CONST_INT
2918 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
2919 {
2920 register HOST_WIDE_INT arg0 = INTVAL (op);
2921 register HOST_WIDE_INT val;
2922
2923 switch (code)
2924 {
2925 case NOT:
2926 val = ~ arg0;
2927 break;
2928
2929 case NEG:
2930 val = - arg0;
2931 break;
2932
2933 case ABS:
2934 val = (arg0 >= 0 ? arg0 : - arg0);
2935 break;
2936
2937 case FFS:
2938 /* Don't use ffs here. Instead, get low order bit and then its
2939 number. If arg0 is zero, this will return 0, as desired. */
2940 arg0 &= GET_MODE_MASK (mode);
2941 val = exact_log2 (arg0 & (- arg0)) + 1;
2942 break;
2943
2944 case TRUNCATE:
2945 val = arg0;
2946 break;
2947
2948 case ZERO_EXTEND:
2949 if (op_mode == VOIDmode)
2950 op_mode = mode;
2951 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
2952 {
2953 /* If we were really extending the mode,
2954 we would have to distinguish between zero-extension
2955 and sign-extension. */
2956 if (width != GET_MODE_BITSIZE (op_mode))
2957 abort ();
2958 val = arg0;
2959 }
2960 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
2961 val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
2962 else
2963 return 0;
2964 break;
2965
2966 case SIGN_EXTEND:
2967 if (op_mode == VOIDmode)
2968 op_mode = mode;
2969 if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
2970 {
2971 /* If we were really extending the mode,
2972 we would have to distinguish between zero-extension
2973 and sign-extension. */
2974 if (width != GET_MODE_BITSIZE (op_mode))
2975 abort ();
2976 val = arg0;
2977 }
2978 else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
2979 {
2980 val
2981 = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
2982 if (val
2983 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
2984 val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
2985 }
2986 else
2987 return 0;
2988 break;
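/* Worked example for the SIGN_EXTEND case above: extending the
   QImode value 0xff (OP_MODE 8 bits wide) first masks, giving
   val = 0xff; bit 7 is then set, so 0x100 is subtracted, leaving
   val = -1, the sign-extended result.  */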
2989
2990 case SQRT:
2991 return 0;
2992
2993 default:
2994 abort ();
2995 }
2996
2997 /* Clear the bits that don't belong in our mode,
2998 unless they and our sign bit are all one.
2999 So we get either a reasonable negative value or a reasonable
3000 unsigned value for this mode. */
3001 if (width < HOST_BITS_PER_WIDE_INT
3002 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3003 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3004 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3005
3006 return GEN_INT (val);
3007 }
3008
3009 /* We can do some operations on integer CONST_DOUBLEs. Also allow
3010 for a DImode operation on a CONST_INT. */
3011 else if (GET_MODE (op) == VOIDmode
3012 && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3013 {
3014 HOST_WIDE_INT l1, h1, lv, hv;
3015
3016 if (GET_CODE (op) == CONST_DOUBLE)
3017 l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3018 else
3019 l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3020
3021 switch (code)
3022 {
3023 case NOT:
3024 lv = ~ l1;
3025 hv = ~ h1;
3026 break;
3027
3028 case NEG:
3029 neg_double (l1, h1, &lv, &hv);
3030 break;
3031
3032 case ABS:
3033 if (h1 < 0)
3034 neg_double (l1, h1, &lv, &hv);
3035 else
3036 lv = l1, hv = h1;
3037 break;
3038
3039 case FFS:
3040 hv = 0;
3041 if (l1 == 0)
3042 lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3043 else
3044 lv = exact_log2 (l1 & (-l1)) + 1;
3045 break;
3046
3047 case TRUNCATE:
3048 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3049 return GEN_INT (l1 & GET_MODE_MASK (mode));
3050 else
3051 return 0;
3052 break;
3053
3054 case ZERO_EXTEND:
3055 if (op_mode == VOIDmode
3056 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3057 return 0;
3058
3059 hv = 0;
3060 lv = l1 & GET_MODE_MASK (op_mode);
3061 break;
3062
3063 case SIGN_EXTEND:
3064 if (op_mode == VOIDmode
3065 || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3066 return 0;
3067 else
3068 {
3069 lv = l1 & GET_MODE_MASK (op_mode);
3070 if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3071 && (lv & ((HOST_WIDE_INT) 1
3072 << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3073 lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3074
3075 hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3076 }
3077 break;
3078
3079 case SQRT:
3080 return 0;
3081
3082 default:
3083 return 0;
3084 }
3085
3086 return immed_double_const (lv, hv, mode);
3087 }
3088
3089 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3090 else if (GET_CODE (op) == CONST_DOUBLE
3091 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3092 {
3093 REAL_VALUE_TYPE d;
3094 jmp_buf handler;
3095 rtx x;
3096
3097 if (setjmp (handler))
3098 /* There used to be a warning here, but that is inadvisable.
3099 People may want to cause traps, and the natural way
3100 to do it should not get a warning. */
3101 return 0;
3102
3103 set_float_handler (handler);
3104
3105 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3106
3107 switch (code)
3108 {
3109 case NEG:
3110 d = REAL_VALUE_NEGATE (d);
3111 break;
3112
3113 case ABS:
3114 if (REAL_VALUE_NEGATIVE (d))
3115 d = REAL_VALUE_NEGATE (d);
3116 break;
3117
3118 case FLOAT_TRUNCATE:
3119 d = (double) real_value_truncate (mode, d);
3120 break;
3121
3122 case FLOAT_EXTEND:
3123 /* All this does is change the mode. */
3124 break;
3125
3126 case FIX:
3127 d = (double) REAL_VALUE_FIX_TRUNCATE (d);
3128 break;
3129
3130 case UNSIGNED_FIX:
3131 d = (double) REAL_VALUE_UNSIGNED_FIX_TRUNCATE (d);
3132 break;
3133
3134 case SQRT:
3135 return 0;
3136
3137 default:
3138 abort ();
3139 }
3140
3141 x = immed_real_const_1 (d, mode);
3142 set_float_handler (NULL_PTR);
3143 return x;
3144 }
3145 else if (GET_CODE (op) == CONST_DOUBLE && GET_MODE_CLASS (mode) == MODE_INT
3146 && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3147 {
3148 REAL_VALUE_TYPE d;
3149 jmp_buf handler;
3150 rtx x;
3151 HOST_WIDE_INT val;
3152
3153 if (setjmp (handler))
3154 return 0;
3155
3156 set_float_handler (handler);
3157
3158 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3159
3160 switch (code)
3161 {
3162 case FIX:
3163 val = REAL_VALUE_FIX (d);
3164 break;
3165
3166 case UNSIGNED_FIX:
3167 val = REAL_VALUE_UNSIGNED_FIX (d);
3168 break;
3169
3170 default:
3171 abort ();
3172 }
3173
3174 set_float_handler (NULL_PTR);
3175
3176 /* Clear the bits that don't belong in our mode,
3177 unless they and our sign bit are all one.
3178 So we get either a reasonable negative value or a reasonable
3179 unsigned value for this mode. */
3180 if (width < HOST_BITS_PER_WIDE_INT
3181 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3182 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3183 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3184
3185 return GEN_INT (val);
3186 }
3187 #endif
3188 /* This was formerly used only for non-IEEE float.
3189 eggert@twinsun.com says it is safe for IEEE also. */
3190 else
3191 {
3192 /* There are some simplifications we can do even if the operands
3193 aren't constant. */
3194 switch (code)
3195 {
3196 case NEG:
3197 case NOT:
3198 /* (not (not X)) == X, similarly for NEG. */
3199 if (GET_CODE (op) == code)
3200 return XEXP (op, 0);
3201 break;
3202
3203 case SIGN_EXTEND:
3204 /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3205 becomes just the MINUS if its mode is MODE. This allows
3206 folding switch statements on machines using casesi (such as
3207 the Vax). */
3208 if (GET_CODE (op) == TRUNCATE
3209 && GET_MODE (XEXP (op, 0)) == mode
3210 && GET_CODE (XEXP (op, 0)) == MINUS
3211 && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3212 && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3213 return XEXP (op, 0);
3214 break;
3215 }
3216
3217 return 0;
3218 }
3219 }
3220 \f
3221 /* Simplify a binary operation CODE with result mode MODE, operating on OP0
3222 and OP1. Return 0 if no simplification is possible.
3223
3224 Don't use this for relational operations such as EQ or LT.
3225 Use simplify_relational_operation instead. */
3226
3227 rtx
3228 simplify_binary_operation (code, mode, op0, op1)
3229 enum rtx_code code;
3230 enum machine_mode mode;
3231 rtx op0, op1;
3232 {
3233 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3234 HOST_WIDE_INT val;
3235 int width = GET_MODE_BITSIZE (mode);
3236 rtx tem;
3237
3238 /* Relational operations don't work here. We must know the mode
3239 of the operands in order to do the comparison correctly.
3240 Assuming a full word can give incorrect results.
3241 Consider comparing 128 with -128 in QImode. */
3242
3243 if (GET_RTX_CLASS (code) == '<')
3244 abort ();
3245
3246 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3247 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3248 && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3249 && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3250 {
3251 REAL_VALUE_TYPE f0, f1, value;
3252 jmp_buf handler;
3253
3254 if (setjmp (handler))
3255 return 0;
3256
3257 set_float_handler (handler);
3258
3259 REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3260 REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3261 f0 = real_value_truncate (mode, f0);
3262 f1 = real_value_truncate (mode, f1);
3263
3264 #ifdef REAL_ARITHMETIC
3265 REAL_ARITHMETIC (value, code, f0, f1);
3266 #else
3267 switch (code)
3268 {
3269 case PLUS:
3270 value = f0 + f1;
3271 break;
3272 case MINUS:
3273 value = f0 - f1;
3274 break;
3275 case MULT:
3276 value = f0 * f1;
3277 break;
3278 case DIV:
3279 #ifndef REAL_INFINITY
3280 if (f1 == 0)
3281 return 0;
3282 #endif
3283 value = f0 / f1;
3284 break;
3285 case SMIN:
3286 value = MIN (f0, f1);
3287 break;
3288 case SMAX:
3289 value = MAX (f0, f1);
3290 break;
3291 default:
3292 abort ();
3293 }
3294 #endif
3295
3296 set_float_handler (NULL_PTR);
3297 value = real_value_truncate (mode, value);
3298 return immed_real_const_1 (value, mode);
3299 }
3300 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3301
3302 /* We can fold some multi-word operations. */
3303 if (GET_MODE_CLASS (mode) == MODE_INT
3304 && GET_CODE (op0) == CONST_DOUBLE
3305 && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3306 {
3307 HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3308
3309 l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3310
3311 if (GET_CODE (op1) == CONST_DOUBLE)
3312 l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3313 else
3314 l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3315
3316 switch (code)
3317 {
3318 case MINUS:
3319 /* A - B == A + (-B). */
3320 neg_double (l2, h2, &lv, &hv);
3321 l2 = lv, h2 = hv;
3322
3323 /* ... fall through ... */
3324
3325 case PLUS:
3326 add_double (l1, h1, l2, h2, &lv, &hv);
3327 break;
3328
3329 case MULT:
3330 mul_double (l1, h1, l2, h2, &lv, &hv);
3331 break;
3332
3333 case DIV: case MOD: case UDIV: case UMOD:
3334 /* We'd need to include tree.h to do this and it doesn't seem worth
3335 it. */
3336 return 0;
3337
3338 case AND:
3339 lv = l1 & l2, hv = h1 & h2;
3340 break;
3341
3342 case IOR:
3343 lv = l1 | l2, hv = h1 | h2;
3344 break;
3345
3346 case XOR:
3347 lv = l1 ^ l2, hv = h1 ^ h2;
3348 break;
3349
3350 case SMIN:
3351 if (h1 < h2
3352 || (h1 == h2
3353 && ((unsigned HOST_WIDE_INT) l1
3354 < (unsigned HOST_WIDE_INT) l2)))
3355 lv = l1, hv = h1;
3356 else
3357 lv = l2, hv = h2;
3358 break;
3359
3360 case SMAX:
3361 if (h1 > h2
3362 || (h1 == h2
3363 && ((unsigned HOST_WIDE_INT) l1
3364 > (unsigned HOST_WIDE_INT) l2)))
3365 lv = l1, hv = h1;
3366 else
3367 lv = l2, hv = h2;
3368 break;
3369
3370 case UMIN:
3371 if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3372 || (h1 == h2
3373 && ((unsigned HOST_WIDE_INT) l1
3374 < (unsigned HOST_WIDE_INT) l2)))
3375 lv = l1, hv = h1;
3376 else
3377 lv = l2, hv = h2;
3378 break;
3379
3380 case UMAX:
3381 if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3382 || (h1 == h2
3383 && ((unsigned HOST_WIDE_INT) l1
3384 > (unsigned HOST_WIDE_INT) l2)))
3385 lv = l1, hv = h1;
3386 else
3387 lv = l2, hv = h2;
3388 break;
3389
3390 case LSHIFTRT: case ASHIFTRT:
3391 case ASHIFT: case LSHIFT:
3392 case ROTATE: case ROTATERT:
3393 #ifdef SHIFT_COUNT_TRUNCATED
3394 l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3395 #endif
3396
3397 if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3398 return 0;
3399
3400 if (code == LSHIFTRT || code == ASHIFTRT)
3401 rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3402 code == ASHIFTRT);
3403 else if (code == ASHIFT || code == LSHIFT)
3404 lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3405 code == ASHIFT);
3406 else if (code == ROTATE)
3407 lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3408 else /* code == ROTATERT */
3409 rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3410 break;
3411
3412 default:
3413 return 0;
3414 }
3415
3416 return immed_double_const (lv, hv, mode);
3417 }
3418
3419 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3420 || width > HOST_BITS_PER_WIDE_INT || width == 0)
3421 {
3422 /* Even if we can't compute a constant result,
3423 there are some cases worth simplifying. */
3424
3425 switch (code)
3426 {
3427 case PLUS:
3428 /* In IEEE floating point, x+0 is not the same as x. Similarly
3429 for the other optimizations below. */
3430 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3431 && GET_MODE_CLASS (mode) != MODE_INT)
3432 break;
3433
3434 if (op1 == CONST0_RTX (mode))
3435 return op0;
3436
3437 /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3438 if (GET_CODE (op0) == NEG)
3439 return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3440 else if (GET_CODE (op1) == NEG)
3441 return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3442
3443 /* Handle both-operands-constant cases. We can only add
3444 CONST_INTs to constants since the sum of relocatable symbols
3445 can't be handled by most assemblers. */
3446
3447 if (CONSTANT_P (op0) && GET_CODE (op1) == CONST_INT)
3448 return plus_constant (op0, INTVAL (op1));
3449 else if (CONSTANT_P (op1) && GET_CODE (op0) == CONST_INT)
3450 return plus_constant (op1, INTVAL (op0));
3451
3452 /* If one of the operands is a PLUS or a MINUS, see if we can
3453 simplify this by the associative law.
3454 Don't use the associative law for floating point.
3455 The inaccuracy makes it nonassociative,
3456 and subtle programs can break if operations are associated. */
3457
3458 if ((GET_MODE_CLASS (mode) == MODE_INT
3459 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3460 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3461 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3462 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3463 return tem;
3464 break;
3465
3466 case COMPARE:
3467 #ifdef HAVE_cc0
3468 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3469 using cc0, in which case we want to leave it as a COMPARE
3470 so we can distinguish it from a register-register-copy.
3471
3472 In IEEE floating point, x-0 is not the same as x. */
3473
3474 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3475 || GET_MODE_CLASS (mode) == MODE_INT)
3476 && op1 == CONST0_RTX (mode))
3477 return op0;
3478 #else
3479 /* Do nothing here. */
3480 #endif
3481 break;
3482
3483 case MINUS:
3484 /* None of these optimizations can be done for IEEE
3485 floating point. */
3486 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3487 && GET_MODE_CLASS (mode) != MODE_INT
3488 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
3489 break;
3490
3491 /* We can't assume x-x is 0 even with non-IEEE floating point. */
3492 if (rtx_equal_p (op0, op1)
3493 && ! side_effects_p (op0)
3494 && GET_MODE_CLASS (mode) != MODE_FLOAT
3495 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
3496 return const0_rtx;
3497
3498 /* Change subtraction from zero into negation. */
3499 if (op0 == CONST0_RTX (mode))
3500 return gen_rtx (NEG, mode, op1);
3501
3502 /* (-1 - a) is ~a. */
3503 if (op0 == constm1_rtx)
3504 return gen_rtx (NOT, mode, op1);
3505
3506 /* Subtracting 0 has no effect. */
3507 if (op1 == CONST0_RTX (mode))
3508 return op0;
3509
3510 /* (a - (-b)) -> (a + b). */
3511 if (GET_CODE (op1) == NEG)
3512 return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3513
3514 /* If one of the operands is a PLUS or a MINUS, see if we can
3515 simplify this by the associative law.
3516 Don't use the associative law for floating point.
3517 The inaccuracy makes it nonassociative,
3518 and subtle programs can break if operations are associated. */
3519
3520 if ((GET_MODE_CLASS (mode) == MODE_INT
3521 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3522 && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3523 || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3524 && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3525 return tem;
3526
3527 /* Don't let a relocatable value get a negative coeff. */
3528 if (GET_CODE (op1) == CONST_INT)
3529 return plus_constant (op0, - INTVAL (op1));
3530 break;
3531
3532 case MULT:
3533 if (op1 == constm1_rtx)
3534 {
3535 tem = simplify_unary_operation (NEG, mode, op0, mode);
3536
3537 return tem ? tem : gen_rtx (NEG, mode, op0);
3538 }
3539
3540 /* In IEEE floating point, x*0 is not always 0. */
3541 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3542 || GET_MODE_CLASS (mode) == MODE_INT)
3543 && op1 == CONST0_RTX (mode)
3544 && ! side_effects_p (op0))
3545 return op1;
3546
3547 /* In IEEE floating point, x*1 is not equivalent to x for nans.
3548 However, ANSI says we can drop signals,
3549 so we can do this anyway. */
3550 if (op1 == CONST1_RTX (mode))
3551 return op0;
3552
3553 /* Convert multiply by constant power of two into shift. */
3554 if (GET_CODE (op1) == CONST_INT
3555 && (val = exact_log2 (INTVAL (op1))) >= 0)
3556 return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
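/* E.g., (mult x (const_int 8)) becomes (ashift x (const_int 3)),
   since exact_log2 (8) == 3.  */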
3557
3558 if (GET_CODE (op1) == CONST_DOUBLE
3559 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3560 {
3561 REAL_VALUE_TYPE d;
3562 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3563
3564 /* x*2 is x+x and x*(-1) is -x */
3565 if (REAL_VALUES_EQUAL (d, dconst2)
3566 && GET_MODE (op0) == mode)
3567 return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
3568
3569 else if (REAL_VALUES_EQUAL (d, dconstm1)
3570 && GET_MODE (op0) == mode)
3571 return gen_rtx (NEG, mode, op0);
3572 }
3573 break;
3574
3575 case IOR:
3576 if (op1 == const0_rtx)
3577 return op0;
3578 if (GET_CODE (op1) == CONST_INT
3579 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3580 return op1;
3581 if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3582 return op0;
3583 /* A | (~A) -> -1 */
3584 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3585 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3586 && ! side_effects_p (op0))
3587 return constm1_rtx;
3588 break;
3589
3590 case XOR:
3591 if (op1 == const0_rtx)
3592 return op0;
3593 if (GET_CODE (op1) == CONST_INT
3594 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3595 return gen_rtx (NOT, mode, op0);
3596 if (op0 == op1 && ! side_effects_p (op0))
3597 return const0_rtx;
3598 break;
3599
3600 case AND:
3601 if (op1 == const0_rtx && ! side_effects_p (op0))
3602 return const0_rtx;
3603 if (GET_CODE (op1) == CONST_INT
3604 && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3605 return op0;
3606 if (op0 == op1 && ! side_effects_p (op0))
3607 return op0;
3608 /* A & (~A) -> 0 */
3609 if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3610 || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3611 && ! side_effects_p (op0))
3612 return const0_rtx;
3613 break;
3614
3615 case UDIV:
3616 /* Convert divide by power of two into shift (divide by 1 handled
3617 below). */
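	  /* For example, (udiv x (const_int 16)) becomes
	     (lshiftrt x (const_int 4)).  */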
3618 if (GET_CODE (op1) == CONST_INT
3619 && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3620 return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
3621
3622 /* ... fall through ... */
3623
3624 case DIV:
3625 if (op1 == CONST1_RTX (mode))
3626 return op0;
3627
3628 /* In IEEE floating point, 0/x is not always 0. */
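	  /* (0 / 0 is NaN, and 0 / x is -0.0 when x is negative.)  */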
3629 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3630 || GET_MODE_CLASS (mode) == MODE_INT)
3631 && op0 == CONST0_RTX (mode)
3632 && ! side_effects_p (op1))
3633 return op0;
3634
3635 #if 0 /* Turned off till an expert says this is a safe thing to do. */
3636 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3637 /* Change division by a constant into multiplication. */
3638 else if (GET_CODE (op1) == CONST_DOUBLE
3639 && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3640 && op1 != CONST0_RTX (mode))
3641 {
3642 REAL_VALUE_TYPE d;
3643 REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3644 if (REAL_VALUES_EQUAL (d, dconst0))
3645 	      abort ();
3646 #if defined (REAL_ARITHMETIC)
3647 	      REAL_ARITHMETIC (d, RDIV_EXPR, dconst1, d);
3648 	      return gen_rtx (MULT, mode, op0,
3649 			      CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3650 #else
3651 	      return gen_rtx (MULT, mode, op0,
3652 			      CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3653 #endif
3654 	    }
3655 #endif
3656 #endif
3657 break;
3658
3659 case UMOD:
3660 /* Handle modulus by power of two (mod with 1 handled below). */
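	  /* For example, (umod x (const_int 8)) becomes
	     (and x (const_int 7)), since x % 8 == x & 7 for unsigned x.  */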
3661 if (GET_CODE (op1) == CONST_INT
3662 && exact_log2 (INTVAL (op1)) > 0)
3663 return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
3664
3665 /* ... fall through ... */
3666
3667 case MOD:
3668 if ((op0 == const0_rtx || op1 == const1_rtx)
3669 && ! side_effects_p (op0) && ! side_effects_p (op1))
3670 return const0_rtx;
3671 break;
3672
3673 case ROTATERT:
3674 case ROTATE:
3675 /* Rotating ~0 always results in ~0. */
3676 if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
3677 && INTVAL (op0) == GET_MODE_MASK (mode)
3678 && ! side_effects_p (op1))
3679 return op0;
3680
3681 /* ... fall through ... */
3682
3683 case LSHIFT:
3684 case ASHIFT:
3685 case ASHIFTRT:
3686 case LSHIFTRT:
3687 if (op1 == const0_rtx)
3688 return op0;
3689 if (op0 == const0_rtx && ! side_effects_p (op1))
3690 return op0;
3691 break;
3692
3693 case SMIN:
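	  /* (HOST_WIDE_INT) 1 << (width - 1) is the most negative value
	     in the mode, so SMIN of it with anything is itself.  */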
3694 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3695 	  && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
3696 && ! side_effects_p (op0))
3697 return op1;
3698 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3699 return op0;
3700 break;
3701
3702 case SMAX:
3703 if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
3704 && INTVAL (op1) == (unsigned) GET_MODE_MASK (mode) >> 1
3705 && ! side_effects_p (op0))
3706 return op1;
3707 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3708 return op0;
3709 break;
3710
3711 case UMIN:
3712 if (op1 == const0_rtx && ! side_effects_p (op0))
3713 return op1;
3714 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3715 return op0;
3716 break;
3717
3718 case UMAX:
3719 if (op1 == constm1_rtx && ! side_effects_p (op0))
3720 return op1;
3721 else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3722 return op0;
3723 break;
3724
3725 default:
3726 abort ();
3727 }
3728
3729 return 0;
3730 }
3731
3732 /* Get the integer argument values in two forms:
3733 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
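  /* For example, with width == 8, a CONST_INT of 0xff gives
     arg0 == 0xff and arg0s == -1.  */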
3734
3735 arg0 = INTVAL (op0);
3736 arg1 = INTVAL (op1);
3737
3738 if (width < HOST_BITS_PER_WIDE_INT)
3739 {
3740 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
3741 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
3742
3743 arg0s = arg0;
3744 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
3745 arg0s |= ((HOST_WIDE_INT) (-1) << width);
3746
3747 arg1s = arg1;
3748 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
3749 arg1s |= ((HOST_WIDE_INT) (-1) << width);
3750 }
3751 else
3752 {
3753 arg0s = arg0;
3754 arg1s = arg1;
3755 }
3756
3757 /* Compute the value of the arithmetic. */
3758
3759 switch (code)
3760 {
3761 case PLUS:
3762 val = arg0s + arg1s;
3763 break;
3764
3765 case MINUS:
3766 val = arg0s - arg1s;
3767 break;
3768
3769 case MULT:
3770 val = arg0s * arg1s;
3771 break;
3772
3773 case DIV:
3774 if (arg1s == 0)
3775 return 0;
3776 val = arg0s / arg1s;
3777 break;
3778
3779 case MOD:
3780 if (arg1s == 0)
3781 return 0;
3782 val = arg0s % arg1s;
3783 break;
3784
3785 case UDIV:
3786 if (arg1 == 0)
3787 return 0;
3788 val = (unsigned HOST_WIDE_INT) arg0 / arg1;
3789 break;
3790
3791 case UMOD:
3792 if (arg1 == 0)
3793 return 0;
3794 val = (unsigned HOST_WIDE_INT) arg0 % arg1;
3795 break;
3796
3797 case AND:
3798 val = arg0 & arg1;
3799 break;
3800
3801 case IOR:
3802 val = arg0 | arg1;
3803 break;
3804
3805 case XOR:
3806 val = arg0 ^ arg1;
3807 break;
3808
3809 case LSHIFTRT:
3810 /* If shift count is undefined, don't fold it; let the machine do
3811 what it wants. But truncate it if the machine will do that. */
3812 if (arg1 < 0)
3813 return 0;
3814
3815 #ifdef SHIFT_COUNT_TRUNCATED
3816 arg1 &= (BITS_PER_WORD - 1);
3817 #endif
3818
3819 if (arg1 >= width)
3820 return 0;
3821
3822 val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
3823 break;
3824
3825 case ASHIFT:
3826 case LSHIFT:
3827 if (arg1 < 0)
3828 return 0;
3829
3830 #ifdef SHIFT_COUNT_TRUNCATED
3831 arg1 &= (BITS_PER_WORD - 1);
3832 #endif
3833
3834 if (arg1 >= width)
3835 return 0;
3836
3837 val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
3838 break;
3839
3840 case ASHIFTRT:
3841 if (arg1 < 0)
3842 return 0;
3843
3844 #ifdef SHIFT_COUNT_TRUNCATED
3845 arg1 &= (BITS_PER_WORD - 1);
3846 #endif
3847
3848 if (arg1 >= width)
3849 return 0;
3850
3851 val = arg0s >> arg1;
3852
3853 /* Bootstrap compiler may not have sign extended the right shift.
3854 	 Manually extend the sign to ensure bootstrap cc matches gcc.  */
3855 if (arg0s < 0 && arg1 > 0)
3856 val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
3857
3858 break;
3859
3860 case ROTATERT:
3861 if (arg1 < 0)
3862 return 0;
3863
3864 arg1 %= width;
3865 val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
3866 | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
3867 break;
3868
3869 case ROTATE:
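      /* E.g., in an 8-bit mode, 0x81 rotated left by 1 gives 0x103,
	 which the masking below truncates to 0x03.  */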
3870 if (arg1 < 0)
3871 return 0;
3872
3873 arg1 %= width;
3874 val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
3875 | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
3876 break;
3877
3878 case COMPARE:
3879 /* Do nothing here. */
3880 return 0;
3881
3882 case SMIN:
3883 val = arg0s <= arg1s ? arg0s : arg1s;
3884 break;
3885
3886 case UMIN:
3887 val = ((unsigned HOST_WIDE_INT) arg0
3888 <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
3889 break;
3890
3891 case SMAX:
3892 val = arg0s > arg1s ? arg0s : arg1s;
3893 break;
3894
3895 case UMAX:
3896 val = ((unsigned HOST_WIDE_INT) arg0
3897 > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
3898 break;
3899
3900 default:
3901 abort ();
3902 }
3903
3904 /* Clear the bits that don't belong in our mode, unless they and our sign
3905 bit are all one. So we get either a reasonable negative value or a
3906 reasonable unsigned value for this mode. */
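  /* E.g., with width == 8, 0x100 becomes 0, while -128 already has
     all its high bits set and is left alone.  */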
3907 if (width < HOST_BITS_PER_WIDE_INT
3908 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3909 != ((HOST_WIDE_INT) (-1) << (width - 1))))
3910 val &= ((HOST_WIDE_INT) 1 << width) - 1;
3911
3912 return GEN_INT (val);
3913 }
3914 \f
3915 /* Simplify a PLUS or MINUS, at least one of whose operands may be another
3916 PLUS or MINUS.
3917
3918    Rather than test for specific cases, we do this by a brute-force method
3919 and do all possible simplifications until no more changes occur. Then
3920 we rebuild the operation. */
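/* For example, (plus (minus x 1) (minus 1 x)) expands to the terms
   x, -1, +1, -x; the constants and the x terms cancel pairwise, and
   the whole expression folds to (const_int 0).  */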
3921
3922 static rtx
3923 simplify_plus_minus (code, mode, op0, op1)
3924 enum rtx_code code;
3925 enum machine_mode mode;
3926 rtx op0, op1;
3927 {
3928 rtx ops[8];
3929 int negs[8];
3930 rtx result, tem;
3931 int n_ops = 2, input_ops = 2;
3932 int i, j, k;
3933 int first = 1, negate = 0, changed;
3934
3935 bzero (ops, sizeof ops);
3936
3937 /* Set up the two operands and then expand them until nothing has been
3938 changed. If we run out of room in our array, give up; this should
3939 almost never happen. */
3940
3941 ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
3942
3943 changed = 1;
3944 while (changed)
3945 {
3946 changed = 0;
3947
3948 for (i = 0; i < n_ops; i++)
3949 switch (GET_CODE (ops[i]))
3950 {
3951 case PLUS:
3952 case MINUS:
3953 if (n_ops == 7)
3954 return 0;
3955
3956 ops[n_ops] = XEXP (ops[i], 1);
3957 negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
3958 ops[i] = XEXP (ops[i], 0);
3959 input_ops++;
3960 changed = 1;
3961 break;
3962
3963 case NEG:
3964 ops[i] = XEXP (ops[i], 0);
3965 negs[i] = ! negs[i];
3966 changed = 1;
3967 break;
3968
3969 case CONST:
3970 ops[i] = XEXP (ops[i], 0);
3971 changed = 1;
3972 break;
3973
3974 case NOT:
3975 /* ~a -> (-a - 1) */
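	    /* In two's complement, ~a == -a - 1, so we replace the NOT
	       by a negated operand and add a -1 term.  */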
3976 if (n_ops != 7)
3977 {
3978 ops[n_ops] = constm1_rtx;
3979 negs[n_ops++] = negs[i];
3980 ops[i] = XEXP (ops[i], 0);
3981 negs[i] = ! negs[i];
3982 changed = 1;
3983 }
3984 break;
3985
3986 case CONST_INT:
3987 if (negs[i])
3988 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
3989 break;
3990 }
3991 }
3992
3993 /* If we only have two operands, we can't do anything. */
3994 if (n_ops <= 2)
3995 return 0;
3996
3997 /* Now simplify each pair of operands until nothing changes. The first
3998 time through just simplify constants against each other. */
3999
4000 changed = 1;
4001 while (changed)
4002 {
4003 changed = first;
4004
4005 for (i = 0; i < n_ops - 1; i++)
4006 for (j = i + 1; j < n_ops; j++)
4007 if (ops[i] != 0 && ops[j] != 0
4008 && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4009 {
4010 rtx lhs = ops[i], rhs = ops[j];
4011 enum rtx_code ncode = PLUS;
4012
4013 if (negs[i] && ! negs[j])
4014 lhs = ops[j], rhs = ops[i], ncode = MINUS;
4015 else if (! negs[i] && negs[j])
4016 ncode = MINUS;
4017
4018 tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4019 if (tem)
4020 {
4021 ops[i] = tem, ops[j] = 0;
4022 negs[i] = negs[i] && negs[j];
4023 if (GET_CODE (tem) == NEG)
4024 ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4025
4026 if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4027 ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4028 changed = 1;
4029 }
4030 }
4031
4032 first = 0;
4033 }
4034
4035 /* Pack all the operands to the lower-numbered entries and give up if
4036 we didn't reduce the number of operands we had. Make sure we
4037 count a CONST as two operands. */
4038
4039 for (i = 0, j = 0, k = 0; j < n_ops; j++)
4040 if (ops[j] != 0)
4041 {
4042 ops[i] = ops[j], negs[i++] = negs[j];
4043 if (GET_CODE (ops[j]) == CONST)
4044 k++;
4045 }
4046
4047 if (i + k >= input_ops)
4048 return 0;
4049
4050 n_ops = i;
4051
4052 /* If we have a CONST_INT, put it last. */
4053 for (i = 0; i < n_ops - 1; i++)
4054 if (GET_CODE (ops[i]) == CONST_INT)
4055 {
4056       tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i], ops[i] = tem;
4057 j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4058 }
4059
4060 /* Put a non-negated operand first. If there aren't any, make all
4061 operands positive and negate the whole thing later. */
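  /* E.g., if every remaining term is negated, -a - b is built
     as (neg (plus a b)).  */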
4062 for (i = 0; i < n_ops && negs[i]; i++)
4063 ;
4064
4065 if (i == n_ops)
4066 {
4067 for (i = 0; i < n_ops; i++)
4068 negs[i] = 0;
4069 negate = 1;
4070 }
4071 else if (i != 0)
4072 {
4073 tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4074 j = negs[0], negs[0] = negs[i], negs[i] = j;
4075 }
4076
4077 /* Now make the result by performing the requested operations. */
4078 result = ops[0];
4079 for (i = 1; i < n_ops; i++)
4080 result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4081
4082 return negate ? gen_rtx (NEG, mode, result) : result;
4083 }
4084 \f
4085 /* Make a binary operation by properly ordering the operands and
4086 seeing if the expression folds. */
4087
4088 static rtx
4089 cse_gen_binary (code, mode, op0, op1)
4090 enum rtx_code code;
4091 enum machine_mode mode;
4092 rtx op0, op1;
4093 {
4094 rtx tem;
4095
4096 /* Put complex operands first and constants second if commutative. */
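  /* E.g., (plus (const_int 4) reg) becomes (plus reg (const_int 4)),
     the canonical order for commutative operations.  */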
4097 if (GET_RTX_CLASS (code) == 'c'
4098 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4099 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4100 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4101 || (GET_CODE (op0) == SUBREG
4102 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4103 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4104 tem = op0, op0 = op1, op1 = tem;
4105
4106 /* If this simplifies, do it. */
4107 tem = simplify_binary_operation (code, mode, op0, op1);
4108
4109 if (tem)
4110 return tem;
4111
4112 /* Handle addition and subtraction of CONST_INT specially. Otherwise,
4113 just form the operation. */
4114
4115 if (code == PLUS && GET_CODE (op1) == CONST_INT
4116 && GET_MODE (op0) != VOIDmode)
4117 return plus_constant (op0, INTVAL (op1));
4118 else if (code == MINUS && GET_CODE (op1) == CONST_INT
4119 && GET_MODE (op0) != VOIDmode)
4120 return plus_constant (op0, - INTVAL (op1));
4121 else
4122 return gen_rtx (code, mode, op0, op1);
4123 }
4124 \f
4125 /* Like simplify_binary_operation except used for relational operators.
4126 MODE is the mode of the operands, not that of the result. */
4127
4128 rtx
4129 simplify_relational_operation (code, mode, op0, op1)
4130 enum rtx_code code;
4131 enum machine_mode mode;
4132 rtx op0, op1;
4133 {
4134 register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
4135 HOST_WIDE_INT val;
4136 int width = GET_MODE_BITSIZE (mode);
4137
4138 /* If op0 is a compare, extract the comparison arguments from it. */
4139 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4140 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4141
4142 /* Unlike the arithmetic operations, we can do the comparison whether
4143 or not WIDTH is larger than HOST_BITS_PER_WIDE_INT because the
4144      CONST_INTs are to be understood as being infinite precision, as
4145 is the comparison. So there is no question of overflow. */
4146
4147 if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT || width == 0)
4148 {
4149 /* Even if we can't compute a constant result,
4150 there are some cases worth simplifying. */
4151
4152 /* For non-IEEE floating-point, if the two operands are equal, we know
4153 the result. */
4154 if (rtx_equal_p (op0, op1)
4155 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4156 || GET_MODE_CLASS (GET_MODE (op0)) != MODE_FLOAT))
4157 return (code == EQ || code == GE || code == LE || code == LEU
4158 || code == GEU) ? const_true_rtx : const0_rtx;
4159
4160 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4161 else if (GET_CODE (op0) == CONST_DOUBLE
4162 && GET_CODE (op1) == CONST_DOUBLE
4163 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4164 {
4165 REAL_VALUE_TYPE d0, d1;
4166 jmp_buf handler;
4167 int op0lt, op1lt, equal;
4168
4169 if (setjmp (handler))
4170 return 0;
4171
4172 set_float_handler (handler);
4173 REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4174 REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4175 equal = REAL_VALUES_EQUAL (d0, d1);
4176 op0lt = REAL_VALUES_LESS (d0, d1);
4177 op1lt = REAL_VALUES_LESS (d1, d0);
4178 set_float_handler (NULL_PTR);
4179
4180 switch (code)
4181 {
4182 case EQ:
4183 return equal ? const_true_rtx : const0_rtx;
4184 case NE:
4185 return !equal ? const_true_rtx : const0_rtx;
4186 case LE:
4187 return equal || op0lt ? const_true_rtx : const0_rtx;
4188 case LT:
4189 return op0lt ? const_true_rtx : const0_rtx;
4190 case GE:
4191 return equal || op1lt ? const_true_rtx : const0_rtx;
4192 case GT:
4193 return op1lt ? const_true_rtx : const0_rtx;
4194 }
4195 }
4196 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4197
4198 else if (GET_MODE_CLASS (mode) == MODE_INT
4199 && width > HOST_BITS_PER_WIDE_INT
4200 && (GET_CODE (op0) == CONST_DOUBLE
4201 || GET_CODE (op0) == CONST_INT)
4202 && (GET_CODE (op1) == CONST_DOUBLE
4203 || GET_CODE (op1) == CONST_INT))
4204 {
4205 HOST_WIDE_INT h0, l0, h1, l1;
4206 unsigned HOST_WIDE_INT uh0, ul0, uh1, ul1;
4207 int op0lt, op0ltu, equal;
4208
4209 if (GET_CODE (op0) == CONST_DOUBLE)
4210 l0 = CONST_DOUBLE_LOW (op0), h0 = CONST_DOUBLE_HIGH (op0);
4211 else
4212 l0 = INTVAL (op0), h0 = l0 < 0 ? -1 : 0;
4213
4214 if (GET_CODE (op1) == CONST_DOUBLE)
4215 l1 = CONST_DOUBLE_LOW (op1), h1 = CONST_DOUBLE_HIGH (op1);
4216 else
4217 l1 = INTVAL (op1), h1 = l1 < 0 ? -1 : 0;
4218
4219 uh0 = h0, ul0 = l0, uh1 = h1, ul1 = l1;
4220
4221 equal = (h0 == h1 && l0 == l1);
4222 op0lt = (h0 < h1 || (h0 == h1 && l0 < l1));
4223 op0ltu = (uh0 < uh1 || (uh0 == uh1 && ul0 < ul1));
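	  /* The signed and unsigned orderings can differ: -1 (all bits
	     set) is less than 1 as a signed value but greater than 1
	     as an unsigned value.  */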
4224
4225 switch (code)
4226 {
4227 case EQ:
4228 return equal ? const_true_rtx : const0_rtx;
4229 case NE:
4230 return !equal ? const_true_rtx : const0_rtx;
4231 case LE:
4232 return equal || op0lt ? const_true_rtx : const0_rtx;
4233 case LT:
4234 return op0lt ? const_true_rtx : const0_rtx;
4235 case GE:
4236 return !op0lt ? const_true_rtx : const0_rtx;
4237 case GT:
4238 return !equal && !op0lt ? const_true_rtx : const0_rtx;
4239 case LEU:
4240 return equal || op0ltu ? const_true_rtx : const0_rtx;
4241 case LTU:
4242 return op0ltu ? const_true_rtx : const0_rtx;
4243 case GEU:
4244 return !op0ltu ? const_true_rtx : const0_rtx;
4245 case GTU:
4246 return !equal && !op0ltu ? const_true_rtx : const0_rtx;
4247 }
4248 }
4249
4250 switch (code)
4251 {
4252 case EQ:
4253 {
4254 #if 0
4255 /* We can't make this assumption due to #pragma weak */
4256 if (CONSTANT_P (op0) && op1 == const0_rtx)
4257 return const0_rtx;
4258 #endif
4259 if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
4260 /* On some machines, the ap reg can be 0 sometimes. */
4261 && op0 != arg_pointer_rtx)
4262 return const0_rtx;
4263 break;
4264 }
4265
4266 case NE:
4267 #if 0
4268 /* We can't make this assumption due to #pragma weak */
4269 if (CONSTANT_P (op0) && op1 == const0_rtx)
4270 return const_true_rtx;
4271 #endif
4272 if (NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx
4273 /* On some machines, the ap reg can be 0 sometimes. */
4274 && op0 != arg_pointer_rtx)
4275 return const_true_rtx;
4276 break;
4277
4278 case GEU:
4279 /* Unsigned values are never negative, but we must be sure we are
4280 actually comparing a value, not a CC operand. */
4281 if (op1 == const0_rtx
4282 && GET_MODE_CLASS (mode) == MODE_INT)
4283 return const_true_rtx;
4284 break;
4285
4286 case LTU:
4287 if (op1 == const0_rtx
4288 && GET_MODE_CLASS (mode) == MODE_INT)
4289 return const0_rtx;
4290 break;
4291
4292 case LEU:
4293 /* Unsigned values are never greater than the largest
4294 unsigned value. */
4295 if (GET_CODE (op1) == CONST_INT
4296 && INTVAL (op1) == GET_MODE_MASK (mode)
4297 && GET_MODE_CLASS (mode) == MODE_INT)
4298 return const_true_rtx;
4299 break;
4300
4301 case GTU:
4302 if (GET_CODE (op1) == CONST_INT
4303 && INTVAL (op1) == GET_MODE_MASK (mode)
4304 && GET_MODE_CLASS (mode) == MODE_INT)
4305 return const0_rtx;
4306 break;
4307 }
4308
4309 return 0;
4310 }
4311
4312 /* Get the integer argument values in two forms:
4313 zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S. */
4314
4315 arg0 = INTVAL (op0);
4316 arg1 = INTVAL (op1);
4317
4318 if (width < HOST_BITS_PER_WIDE_INT)
4319 {
4320 arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4321 arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4322
4323 arg0s = arg0;
4324 if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4325 arg0s |= ((HOST_WIDE_INT) (-1) << width);
4326
4327 arg1s = arg1;
4328 if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4329 arg1s |= ((HOST_WIDE_INT) (-1) << width);
4330 }
4331 else
4332 {
4333 arg0s = arg0;
4334 arg1s = arg1;
4335 }
4336
4337 /* Compute the value of the arithmetic. */
4338
4339 switch (code)
4340 {
4341 case NE:
4342 val = arg0 != arg1 ? STORE_FLAG_VALUE : 0;
4343 break;
4344
4345 case EQ:
4346 val = arg0 == arg1 ? STORE_FLAG_VALUE : 0;
4347 break;
4348
4349 case LE:
4350 val = arg0s <= arg1s ? STORE_FLAG_VALUE : 0;
4351 break;
4352
4353 case LT:
4354 val = arg0s < arg1s ? STORE_FLAG_VALUE : 0;
4355 break;
4356
4357 case GE:
4358 val = arg0s >= arg1s ? STORE_FLAG_VALUE : 0;
4359 break;
4360
4361 case GT:
4362 val = arg0s > arg1s ? STORE_FLAG_VALUE : 0;
4363 break;
4364
4365 case LEU:
4366 val = (((unsigned HOST_WIDE_INT) arg0)
4367 <= ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4368 break;
4369
4370 case LTU:
4371 val = (((unsigned HOST_WIDE_INT) arg0)
4372 < ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4373 break;
4374
4375 case GEU:
4376 val = (((unsigned HOST_WIDE_INT) arg0)
4377 >= ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4378 break;
4379
4380 case GTU:
4381 val = (((unsigned HOST_WIDE_INT) arg0)
4382 > ((unsigned HOST_WIDE_INT) arg1) ? STORE_FLAG_VALUE : 0);
4383 break;
4384
4385 default:
4386 abort ();
4387 }
4388
4389 /* Clear the bits that don't belong in our mode, unless they and our sign
4390 bit are all one. So we get either a reasonable negative value or a
4391 reasonable unsigned value for this mode. */
4392 if (width < HOST_BITS_PER_WIDE_INT
4393 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4394 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4395 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4396
4397 return GEN_INT (val);
4398 }
4399 \f
4400 /* Simplify CODE, an operation with result mode MODE and three operands,
4401 OP0, OP1, and OP2. OP0_MODE was the mode of OP0 before it became
4402    a constant.  Return 0 if no simplification is possible.  */
4403
4404 rtx
4405 simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4406 enum rtx_code code;
4407 enum machine_mode mode, op0_mode;
4408 rtx op0, op1, op2;
4409 {
4410 int width = GET_MODE_BITSIZE (mode);
4411
4412 /* VOIDmode means "infinite" precision. */
4413 if (width == 0)
4414 width = HOST_BITS_PER_WIDE_INT;
4415
4416 switch (code)
4417 {
4418 case SIGN_EXTRACT:
4419 case ZERO_EXTRACT:
4420 if (GET_CODE (op0) == CONST_INT
4421 && GET_CODE (op1) == CONST_INT
4422 && GET_CODE (op2) == CONST_INT
4423 && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4424 && width <= HOST_BITS_PER_WIDE_INT)
4425 {
4426 /* Extracting a bit-field from a constant */
4427 HOST_WIDE_INT val = INTVAL (op0);
4428
4429 #if BITS_BIG_ENDIAN
4430 val >>= (GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1));
4431 #else
4432 val >>= INTVAL (op2);
4433 #endif
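	  /* E.g., with little-endian bit numbering, extracting 8 bits
	     at position 4 from 0xabcd gives (0xabcd >> 4) & 0xff,
	     which is 0xbc.  */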
4434 if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4435 {
4436 /* First zero-extend. */
4437 val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4438 /* If desired, propagate sign bit. */
4439 if (code == SIGN_EXTRACT
4440 && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4441 val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4442 }
4443
4444 /* Clear the bits that don't belong in our mode,
4445 unless they and our sign bit are all one.
4446 So we get either a reasonable negative value or a reasonable
4447 unsigned value for this mode. */
4448 if (width < HOST_BITS_PER_WIDE_INT
4449 && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4450 != ((HOST_WIDE_INT) (-1) << (width - 1))))
4451 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4452
4453 return GEN_INT (val);
4454 }
4455 break;
4456
4457 case IF_THEN_ELSE:
4458 if (GET_CODE (op0) == CONST_INT)
4459 return op0 != const0_rtx ? op1 : op2;
4460 break;
4461
4462 default:
4463 abort ();
4464 }
4465
4466 return 0;
4467 }
4468 \f
4469 /* If X is a nontrivial arithmetic operation on an argument
4470 for which a constant value can be determined, return
4471 the result of operating on that value, as a constant.
4472 Otherwise, return X, possibly with one or more operands
4473 modified by recursive calls to this function.
4474
4475 If X is a register whose contents are known, we do NOT
4476 return those contents. This is because an instruction that
4477 uses a register is usually faster than one that uses a constant.
4478
4479 INSN is the insn that we may be modifying. If it is 0, make a copy
4480 of X before modifying it. */
4481
4482 static rtx
4483 fold_rtx (x, insn)
4484 rtx x;
4485 rtx insn;
4486 {
4487 register enum rtx_code code;
4488 register enum machine_mode mode;
4489 register char *fmt;
4490 register int i;
4491 rtx new = 0;
4492 int copied = 0;
4493 int must_swap = 0;
4494
4495 /* Folded equivalents of first two operands of X. */
4496 rtx folded_arg0;
4497 rtx folded_arg1;
4498
4499 /* Constant equivalents of first three operands of X;
4500 0 when no such equivalent is known. */
4501 rtx const_arg0;
4502 rtx const_arg1;
4503 rtx const_arg2;
4504
4505 /* The mode of the first operand of X. We need this for sign and zero
4506 extends. */
4507 enum machine_mode mode_arg0;
4508
4509 if (x == 0)
4510 return x;
4511
4512 mode = GET_MODE (x);
4513 code = GET_CODE (x);
4514 switch (code)
4515 {
4516 case CONST:
4517 case CONST_INT:
4518 case CONST_DOUBLE:
4519 case SYMBOL_REF:
4520 case LABEL_REF:
4521 case REG:
4522 /* No use simplifying an EXPR_LIST
4523 since they are used only for lists of args
4524 in a function call's REG_EQUAL note. */
4525 case EXPR_LIST:
4526 return x;
4527
4528 #ifdef HAVE_cc0
4529 case CC0:
4530 return prev_insn_cc0;
4531 #endif
4532
4533 case PC:
4534 /* If the next insn is a CODE_LABEL followed by a jump table,
4535 PC's value is a LABEL_REF pointing to that label. That
4536 lets us fold switch statements on the Vax. */
4537 if (insn && GET_CODE (insn) == JUMP_INSN)
4538 {
4539 rtx next = next_nonnote_insn (insn);
4540
4541 if (next && GET_CODE (next) == CODE_LABEL
4542 && NEXT_INSN (next) != 0
4543 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4544 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4545 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4546 return gen_rtx (LABEL_REF, Pmode, next);
4547 }
4548 break;
4549
4550 case SUBREG:
4551 /* See if we previously assigned a constant value to this SUBREG. */
4552 if ((new = lookup_as_function (x, CONST_INT)) != 0
4553 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4554 return new;
4555
4556 /* If this is a paradoxical SUBREG, we have no idea what value the
4557 extra bits would have. However, if the operand is equivalent
4558 to a SUBREG whose operand is the same as our mode, and all the
4559 modes are within a word, we can just use the inner operand
4560 because these SUBREGs just say how to treat the register. */
4561
4562 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4563 {
4564 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4565 struct table_elt *elt;
4566
4567 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4568 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4569 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4570 imode)) != 0)
4571 {
4572 for (elt = elt->first_same_value;
4573 elt; elt = elt->next_same_value)
4574 if (GET_CODE (elt->exp) == SUBREG
4575 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4576 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4577 return copy_rtx (SUBREG_REG (elt->exp));
4578 }
4579
4580 return x;
4581 }
4582
4583 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
4584 We might be able to if the SUBREG is extracting a single word in an
4585 integral mode or extracting the low part. */
4586
4587 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4588 const_arg0 = equiv_constant (folded_arg0);
4589 if (const_arg0)
4590 folded_arg0 = const_arg0;
4591
4592 if (folded_arg0 != SUBREG_REG (x))
4593 {
4594 new = 0;
4595
4596 if (GET_MODE_CLASS (mode) == MODE_INT
4597 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4598 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4599 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4600 GET_MODE (SUBREG_REG (x)));
4601 if (new == 0 && subreg_lowpart_p (x))
4602 new = gen_lowpart_if_possible (mode, folded_arg0);
4603 if (new)
4604 return new;
4605 }
4606
4607 /* If this is a narrowing SUBREG and our operand is a REG, see if
4608 we can find an equivalence for REG that is an arithmetic operation
4609 in a wider mode where both operands are paradoxical SUBREGs
4610 from objects of our result mode. In that case, we couldn't report
4611 an equivalent value for that operation, since we don't know what the
4612 extra bits will be. But we can find an equivalence for this SUBREG
4613 	 by folding that operation in the narrow mode.  This allows us to
4614 fold arithmetic in narrow modes when the machine only supports
4615 word-sized arithmetic.
4616
4617 Also look for a case where we have a SUBREG whose operand is the
4618 same as our result. If both modes are smaller than a word, we
4619 are simply interpreting a register in different modes and we
4620 can use the inner value. */
4621
4622 if (GET_CODE (folded_arg0) == REG
4623 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4624 && subreg_lowpart_p (x))
4625 {
4626 struct table_elt *elt;
4627
4628 /* We can use HASH here since we know that canon_hash won't be
4629 called. */
4630 elt = lookup (folded_arg0,
4631 HASH (folded_arg0, GET_MODE (folded_arg0)),
4632 GET_MODE (folded_arg0));
4633
4634 if (elt)
4635 elt = elt->first_same_value;
4636
4637 for (; elt; elt = elt->next_same_value)
4638 {
4639 enum rtx_code eltcode = GET_CODE (elt->exp);
4640
4641 /* Just check for unary and binary operations. */
4642 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4643 && GET_CODE (elt->exp) != SIGN_EXTEND
4644 && GET_CODE (elt->exp) != ZERO_EXTEND
4645 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4646 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4647 {
4648 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4649
4650 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4651 op0 = fold_rtx (op0, NULL_RTX);
4652
4653 op0 = equiv_constant (op0);
4654 if (op0)
4655 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4656 op0, mode);
4657 }
4658 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4659 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4660 && eltcode != DIV && eltcode != MOD
4661 && eltcode != UDIV && eltcode != UMOD
4662 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4663 && eltcode != ROTATE && eltcode != ROTATERT
4664 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4665 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4666 == mode))
4667 || CONSTANT_P (XEXP (elt->exp, 0)))
4668 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4669 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4670 == mode))
4671 || CONSTANT_P (XEXP (elt->exp, 1))))
4672 {
4673 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4674 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4675
4676 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4677 op0 = fold_rtx (op0, NULL_RTX);
4678
4679 if (op0)
4680 op0 = equiv_constant (op0);
4681
4682 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
4683 op1 = fold_rtx (op1, NULL_RTX);
4684
4685 if (op1)
4686 op1 = equiv_constant (op1);
4687
4688 if (op0 && op1)
4689 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
4690 op0, op1);
4691 }
4692
4693 else if (GET_CODE (elt->exp) == SUBREG
4694 && GET_MODE (SUBREG_REG (elt->exp)) == mode
4695 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
4696 <= UNITS_PER_WORD)
4697 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4698 new = copy_rtx (SUBREG_REG (elt->exp));
4699
4700 if (new)
4701 return new;
4702 }
4703 }
4704
4705 return x;
4706
4707 case NOT:
4708 case NEG:
4709 /* If we have (NOT Y), see if Y is known to be (NOT Z).
4710 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
4711 new = lookup_as_function (XEXP (x, 0), code);
4712 if (new)
4713 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
4714 break;
4715
4716 case MEM:
4717 /* If we are not actually processing an insn, don't try to find the
4718 best address. Not only don't we care, but we could modify the
4719 MEM in an invalid way since we have no insn to validate against. */
4720 if (insn != 0)
4721 find_best_addr (insn, &XEXP (x, 0));
4722
4723 {
4724 /* Even if we don't fold in the insn itself,
4725 we can safely do so here, in hopes of getting a constant. */
4726 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
4727 rtx base = 0;
4728 HOST_WIDE_INT offset = 0;
4729
4730 if (GET_CODE (addr) == REG
4731 && REGNO_QTY_VALID_P (REGNO (addr))
4732 && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
4733 && qty_const[reg_qty[REGNO (addr)]] != 0)
4734 addr = qty_const[reg_qty[REGNO (addr)]];
4735
4736 /* If address is constant, split it into a base and integer offset. */
4737 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
4738 base = addr;
4739 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
4740 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
4741 {
4742 base = XEXP (XEXP (addr, 0), 0);
4743 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
4744 }
4745 else if (GET_CODE (addr) == LO_SUM
4746 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
4747 base = XEXP (addr, 1);
4748
4749 /* If this is a constant pool reference, we can fold it into its
4750 constant to allow better value tracking. */
4751 if (base && GET_CODE (base) == SYMBOL_REF
4752 && CONSTANT_POOL_ADDRESS_P (base))
4753 {
4754 rtx constant = get_pool_constant (base);
4755 enum machine_mode const_mode = get_pool_mode (base);
4756 rtx new;
4757
4758 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
4759 constant_pool_entries_cost = COST (constant);
4760
4761 /* If we are loading the full constant, we have an equivalence. */
4762 if (offset == 0 && mode == const_mode)
4763 return constant;
4764
4765 	  /* If this actually isn't a constant (weird!), we can't do
4766 anything. Otherwise, handle the two most common cases:
4767 extracting a word from a multi-word constant, and extracting
4768 the low-order bits. Other cases don't seem common enough to
4769 worry about. */
4770 if (! CONSTANT_P (constant))
4771 return x;
4772
4773 if (GET_MODE_CLASS (mode) == MODE_INT
4774 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4775 && offset % UNITS_PER_WORD == 0
4776 && (new = operand_subword (constant,
4777 offset / UNITS_PER_WORD,
4778 0, const_mode)) != 0)
4779 return new;
4780
4781 if (((BYTES_BIG_ENDIAN
4782 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
4783 || (! BYTES_BIG_ENDIAN && offset == 0))
4784 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
4785 return new;
4786 }
4787
4788 /* If this is a reference to a label at a known position in a jump
4789 table, we also know its value. */
4790 if (base && GET_CODE (base) == LABEL_REF)
4791 {
4792 rtx label = XEXP (base, 0);
4793 rtx table_insn = NEXT_INSN (label);
4794
4795 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4796 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
4797 {
4798 rtx table = PATTERN (table_insn);
4799
4800 if (offset >= 0
4801 && (offset / GET_MODE_SIZE (GET_MODE (table))
4802 < XVECLEN (table, 0)))
4803 return XVECEXP (table, 0,
4804 offset / GET_MODE_SIZE (GET_MODE (table)));
4805 }
4806 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
4807 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
4808 {
4809 rtx table = PATTERN (table_insn);
4810
4811 if (offset >= 0
4812 && (offset / GET_MODE_SIZE (GET_MODE (table))
4813 < XVECLEN (table, 1)))
4814 {
4815 offset /= GET_MODE_SIZE (GET_MODE (table));
4816 new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
4817 XEXP (table, 0));
4818
4819 if (GET_MODE (table) != Pmode)
4820 new = gen_rtx (TRUNCATE, GET_MODE (table), new);
4821
4822 return new;
4823 }
4824 }
4825 }
4826
4827 return x;
4828 }
4829 }
4830
4831 const_arg0 = 0;
4832 const_arg1 = 0;
4833 const_arg2 = 0;
4834 mode_arg0 = VOIDmode;
4835
4836 /* Try folding our operands.
4837 Then see which ones have constant values known. */
4838
4839 fmt = GET_RTX_FORMAT (code);
4840 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4841 if (fmt[i] == 'e')
4842 {
4843 rtx arg = XEXP (x, i);
4844 rtx folded_arg = arg, const_arg = 0;
4845 enum machine_mode mode_arg = GET_MODE (arg);
4846 rtx cheap_arg, expensive_arg;
4847 rtx replacements[2];
4848 int j;
4849
4850 /* Most arguments are cheap, so handle them specially. */
4851 switch (GET_CODE (arg))
4852 {
4853 case REG:
4854 /* This is the same as calling equiv_constant; it is duplicated
4855 here for speed. */
4856 if (REGNO_QTY_VALID_P (REGNO (arg))
4857 && qty_const[reg_qty[REGNO (arg)]] != 0
4858 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
4859 && GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
4860 const_arg
4861 = gen_lowpart_if_possible (GET_MODE (arg),
4862 qty_const[reg_qty[REGNO (arg)]]);
4863 break;
4864
4865 case CONST:
4866 case CONST_INT:
4867 case SYMBOL_REF:
4868 case LABEL_REF:
4869 case CONST_DOUBLE:
4870 const_arg = arg;
4871 break;
4872
4873 #ifdef HAVE_cc0
4874 case CC0:
4875 folded_arg = prev_insn_cc0;
4876 mode_arg = prev_insn_cc0_mode;
4877 const_arg = equiv_constant (folded_arg);
4878 break;
4879 #endif
4880
4881 default:
4882 folded_arg = fold_rtx (arg, insn);
4883 const_arg = equiv_constant (folded_arg);
4884 }
4885
4886 /* For the first three operands, see if the operand
4887 is constant or equivalent to a constant. */
4888 switch (i)
4889 {
4890 case 0:
4891 folded_arg0 = folded_arg;
4892 const_arg0 = const_arg;
4893 mode_arg0 = mode_arg;
4894 break;
4895 case 1:
4896 folded_arg1 = folded_arg;
4897 const_arg1 = const_arg;
4898 break;
4899 case 2:
4900 const_arg2 = const_arg;
4901 break;
4902 }
4903
4904 /* Pick the least expensive of the folded argument and an
4905 equivalent constant argument. */
4906 if (const_arg == 0 || const_arg == folded_arg
4907 || COST (const_arg) > COST (folded_arg))
4908 cheap_arg = folded_arg, expensive_arg = const_arg;
4909 else
4910 cheap_arg = const_arg, expensive_arg = folded_arg;
4911
4912 /* Try to replace the operand with the cheapest of the two
4913 possibilities. If it doesn't work and this is either of the first
4914 two operands of a commutative operation, try swapping them.
4915 If THAT fails, try the more expensive, provided it is cheaper
4916 than what is already there. */
4917
4918 if (cheap_arg == XEXP (x, i))
4919 continue;
4920
4921 if (insn == 0 && ! copied)
4922 {
4923 x = copy_rtx (x);
4924 copied = 1;
4925 }
4926
4927 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
4928 for (j = 0;
4929 j < 2 && replacements[j]
4930 && COST (replacements[j]) < COST (XEXP (x, i));
4931 j++)
4932 {
4933 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
4934 break;
4935
4936 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
4937 {
4938 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
4939 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
4940
4941 if (apply_change_group ())
4942 {
4943 /* Swap them back to be invalid so that this loop can
4944 continue and flag them to be swapped back later. */
4945 rtx tem;
4946
4947 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
4948 XEXP (x, 1) = tem;
4949 must_swap = 1;
4950 break;
4951 }
4952 }
4953 }
4954 }
4955
4956 else if (fmt[i] == 'E')
4957 /* Don't try to fold inside of a vector of expressions.
4958 Doing nothing is harmless. */
4959 ;
4960
4961 /* If a commutative operation, place a constant integer as the second
4962 operand unless the first operand is also a constant integer. Otherwise,
4963 place any constant second unless the first operand is also a constant. */
4964
4965 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4966 {
4967 if (must_swap || (const_arg0
4968 && (const_arg1 == 0
4969 || (GET_CODE (const_arg0) == CONST_INT
4970 && GET_CODE (const_arg1) != CONST_INT))))
4971 {
4972 register rtx tem = XEXP (x, 0);
4973
4974 if (insn == 0 && ! copied)
4975 {
4976 x = copy_rtx (x);
4977 copied = 1;
4978 }
4979
4980 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
4981 validate_change (insn, &XEXP (x, 1), tem, 1);
4982 if (apply_change_group ())
4983 {
4984 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
4985 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
4986 }
4987 }
4988 }
4989
4990 /* If X is an arithmetic operation, see if we can simplify it. */
4991
4992 switch (GET_RTX_CLASS (code))
4993 {
4994 case '1':
4995 /* We can't simplify extension ops unless we know the original mode. */
4996 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
4997 && mode_arg0 == VOIDmode)
4998 break;
4999 new = simplify_unary_operation (code, mode,
5000 const_arg0 ? const_arg0 : folded_arg0,
5001 mode_arg0);
5002 break;
5003
5004 case '<':
5005 /* See what items are actually being compared and set FOLDED_ARG[01]
5006 to those values and CODE to the actual comparison code. If any are
5007 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
5008 do anything if both operands are already known to be constant. */
5009
5010 if (const_arg0 == 0 || const_arg1 == 0)
5011 {
5012 struct table_elt *p0, *p1;
5013 rtx true = const_true_rtx, false = const0_rtx;
5014 enum machine_mode mode_arg1;
5015
5016 #ifdef FLOAT_STORE_FLAG_VALUE
5017 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5018 {
5019 true = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, mode);
5020 false = CONST0_RTX (mode);
5021 }
5022 #endif
5023
5024 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5025 &mode_arg0, &mode_arg1);
5026 const_arg0 = equiv_constant (folded_arg0);
5027 const_arg1 = equiv_constant (folded_arg1);
5028
5029 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5030 what kinds of things are being compared, so we can't do
5031 anything with this comparison. */
5032
5033 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5034 break;
5035
5036 /* If we do not now have two constants being compared, see if we
5037 can nevertheless deduce some things about the comparison. */
5038 if (const_arg0 == 0 || const_arg1 == 0)
5039 {
5040 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or non-explicit
5041 constant? These aren't zero, but we don't know their sign. */
5042 if (const_arg1 == const0_rtx
5043 && (NONZERO_BASE_PLUS_P (folded_arg0)
5044 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5045 come out as 0. */
5046 || GET_CODE (folded_arg0) == SYMBOL_REF
5047 #endif
5048 || GET_CODE (folded_arg0) == LABEL_REF
5049 || GET_CODE (folded_arg0) == CONST))
5050 {
5051 if (code == EQ)
5052 return false;
5053 else if (code == NE)
5054 return true;
5055 }
5056
5057 /* See if the two operands are the same. We don't do this
5058 for IEEE floating-point since we can't assume x == x
5059 	     because x might be a NaN.  */
5060
5061 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5062 || GET_MODE_CLASS (mode_arg0) != MODE_FLOAT)
5063 && (folded_arg0 == folded_arg1
5064 || (GET_CODE (folded_arg0) == REG
5065 && GET_CODE (folded_arg1) == REG
5066 && (reg_qty[REGNO (folded_arg0)]
5067 == reg_qty[REGNO (folded_arg1)]))
5068 || ((p0 = lookup (folded_arg0,
5069 (safe_hash (folded_arg0, mode_arg0)
5070 % NBUCKETS), mode_arg0))
5071 && (p1 = lookup (folded_arg1,
5072 (safe_hash (folded_arg1, mode_arg0)
5073 % NBUCKETS), mode_arg0))
5074 && p0->first_same_value == p1->first_same_value)))
5075 return ((code == EQ || code == LE || code == GE
5076 || code == LEU || code == GEU)
5077 ? true : false);
5078
5079 /* If FOLDED_ARG0 is a register, see if the comparison we are
5080 doing now is either the same as we did before or the reverse
5081 (we only check the reverse if not floating-point). */
5082 else if (GET_CODE (folded_arg0) == REG)
5083 {
5084 int qty = reg_qty[REGNO (folded_arg0)];
5085
5086 if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5087 && (comparison_dominates_p (qty_comparison_code[qty], code)
5088 || (comparison_dominates_p (qty_comparison_code[qty],
5089 reverse_condition (code))
5090 && GET_MODE_CLASS (mode_arg0) == MODE_INT))
5091 && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5092 || (const_arg1
5093 && rtx_equal_p (qty_comparison_const[qty],
5094 const_arg1))
5095 || (GET_CODE (folded_arg1) == REG
5096 && (reg_qty[REGNO (folded_arg1)]
5097 == qty_comparison_qty[qty]))))
5098 return (comparison_dominates_p (qty_comparison_code[qty],
5099 code)
5100 ? true : false);
5101 }
5102 }
5103 }
5104
5105 /* If we are comparing against zero, see if the first operand is
5106 equivalent to an IOR with a constant. If so, we may be able to
5107 determine the result of this comparison. */
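  /* E.g., if the first operand is known to be (ior y (const_int 4)),
     it cannot be zero, so EQ with zero is false and NE is true.  */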
5108
5109 if (const_arg1 == const0_rtx)
5110 {
5111 rtx y = lookup_as_function (folded_arg0, IOR);
5112 rtx inner_const;
5113
5114 if (y != 0
5115 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5116 && GET_CODE (inner_const) == CONST_INT
5117 && INTVAL (inner_const) != 0)
5118 {
5119 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5120 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5121 && (INTVAL (inner_const)
5122 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5123 rtx true = const_true_rtx, false = const0_rtx;
5124
5125 #ifdef FLOAT_STORE_FLAG_VALUE
5126 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5127 {
5128 true = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, mode);
5129 false = CONST0_RTX (mode);
5130 }
5131 #endif
5132
5133 switch (code)
5134 {
5135 case EQ:
5136 return false;
5137 case NE:
5138 return true;
5139 case LT: case LE:
5140 if (has_sign)
5141 return true;
5142 break;
5143 case GT: case GE:
5144 if (has_sign)
5145 return false;
5146 break;
5147 }
5148 }
5149 }
5150
5151 new = simplify_relational_operation (code, mode_arg0,
5152 const_arg0 ? const_arg0 : folded_arg0,
5153 const_arg1 ? const_arg1 : folded_arg1);
5154 #ifdef FLOAT_STORE_FLAG_VALUE
5155 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5156 new = ((new == const0_rtx) ? CONST0_RTX (mode)
5157 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, mode));
5158 #endif
5159 break;
5160
5161 case '2':
5162 case 'c':
5163 switch (code)
5164 {
5165 case PLUS:
5166 /* If the second operand is a LABEL_REF, see if the first is a MINUS
5167 with that LABEL_REF as its second operand. If so, the result is
5168 the first operand of that MINUS. This handles switches with an
5169 ADDR_DIFF_VEC table. */
5170 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5171 {
5172 rtx y = lookup_as_function (folded_arg0, MINUS);
5173
5174 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5175 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5176 return XEXP (y, 0);
5177 }
5178 goto from_plus;
5179
5180 case MINUS:
5181 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5182 If so, produce (PLUS Z C2-C). */
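	  /* For example, if Y is (PLUS Z 10) and C is 4, we produce
	     (PLUS Z 6).  */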
5183 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5184 {
5185 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5186 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5187 return fold_rtx (plus_constant (y, -INTVAL (const_arg1)),
5188 NULL_RTX);
5189 }
5190
5191 /* ... fall through ... */
5192
5193 from_plus:
5194 case SMIN: case SMAX: case UMIN: case UMAX:
5195 case IOR: case AND: case XOR:
5196 case MULT: case DIV: case UDIV:
5197 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
5198 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5199 is known to be of similar form, we may be able to replace the
5200 operation with a combined operation. This may eliminate the
5201 intermediate operation if every use is simplified in this way.
5202 Note that the similar optimization done by combine.c only works
5203 if the intermediate operation's result has only one reference. */
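	  /* E.g., if REG is known to be (ashift x 2) and we see
	     (ashift REG 3), we can produce (ashift x 5); shift counts
	     combine by addition.  */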
5204
5205 if (GET_CODE (folded_arg0) == REG
5206 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5207 {
5208 int is_shift
5209 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5210 rtx y = lookup_as_function (folded_arg0, code);
5211 rtx inner_const;
5212 enum rtx_code associate_code;
5213 rtx new_const;
5214
5215 if (y == 0
5216 || 0 == (inner_const
5217 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5218 || GET_CODE (inner_const) != CONST_INT
5219 /* If we have compiled a statement like
5220 "if (x == (x & mask1))", and now are looking at
5221 "x & mask2", we will have a case where the first operand
5222 of Y is the same as our first operand. Unless we detect
5223 this case, an infinite loop will result. */
5224 || XEXP (y, 0) == folded_arg0)
5225 break;
5226
5227 /* Don't associate these operations if they are a PLUS with the
5228 same constant and it is a power of two. These might be doable
5229 with a pre- or post-increment. Similarly for two subtracts of
5230 	     identical powers of two with post-decrement.  */
5231
5232 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5233 && (0
5234 #if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5235 || exact_log2 (INTVAL (const_arg1)) >= 0
5236 #endif
5237 #if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5238 || exact_log2 (- INTVAL (const_arg1)) >= 0
5239 #endif
5240 ))
5241 break;
5242
5243 /* Compute the code used to compose the constants. For example,
5244 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
5245
5246 associate_code
5247 = (code == MULT || code == DIV || code == UDIV ? MULT
5248 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5249
5250 new_const = simplify_binary_operation (associate_code, mode,
5251 const_arg1, inner_const);
5252
5253 if (new_const == 0)
5254 break;
5255
5256 /* If we are associating shift operations, don't let this
5257 produce a shift of larger than the object. This could
5258 	     occur when we follow a sign-extend with a right shift on
5259 a machine that does a sign-extend as a pair of shifts. */
5260
5261 if (is_shift && GET_CODE (new_const) == CONST_INT
5262 && INTVAL (new_const) > GET_MODE_BITSIZE (mode))
5263 break;
5264
5265 y = copy_rtx (XEXP (y, 0));
5266
5267 /* If Y contains our first operand (the most common way this
5268 	     can happen is if Y is a MEM), we would go into an infinite
5269 loop if we tried to fold it. So don't in that case. */
5270
5271 if (! reg_mentioned_p (folded_arg0, y))
5272 y = fold_rtx (y, insn);
5273
5274 return cse_gen_binary (code, mode, y, new_const);
5275 }
5276 }
5277
5278 new = simplify_binary_operation (code, mode,
5279 const_arg0 ? const_arg0 : folded_arg0,
5280 const_arg1 ? const_arg1 : folded_arg1);
5281 break;
5282
5283 case 'o':
5284 /* (lo_sum (high X) X) is simply X. */
5285 if (code == LO_SUM && const_arg0 != 0
5286 && GET_CODE (const_arg0) == HIGH
5287 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5288 return const_arg1;
5289 break;
5290
5291 case '3':
5292 case 'b':
5293 new = simplify_ternary_operation (code, mode, mode_arg0,
5294 const_arg0 ? const_arg0 : folded_arg0,
5295 const_arg1 ? const_arg1 : folded_arg1,
5296 const_arg2 ? const_arg2 : XEXP (x, 2));
5297 break;
5298 }
5299
5300 return new ? new : x;
5301 }
5302 \f
5303 /* Return a constant value currently equivalent to X.
5304 Return 0 if we don't know one. */
5305
5306 static rtx
5307 equiv_constant (x)
5308 rtx x;
5309 {
5310 if (GET_CODE (x) == REG
5311 && REGNO_QTY_VALID_P (REGNO (x))
5312 && qty_const[reg_qty[REGNO (x)]])
5313 x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5314
5315 if (x != 0 && CONSTANT_P (x))
5316 return x;
5317
5318 /* If X is a MEM, try to fold it outside the context of any insn to see if
5319 it might be equivalent to a constant. That handles the case where it
5320 is a constant-pool reference. Then try to look it up in the hash table
5321 in case it is something whose value we have seen before. */
5322
5323 if (GET_CODE (x) == MEM)
5324 {
5325 struct table_elt *elt;
5326
5327 x = fold_rtx (x, NULL_RTX);
5328 if (CONSTANT_P (x))
5329 return x;
5330
5331 elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5332 if (elt == 0)
5333 return 0;
5334
5335 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5336 if (elt->is_const && CONSTANT_P (elt->exp))
5337 return elt->exp;
5338 }
5339
5340 return 0;
5341 }
5342 \f
5343 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5344 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5345 least-significant part of X.
5346 MODE specifies how big a part of X to return.
5347
5348 If the requested operation cannot be done, 0 is returned.
5349
5350 This is similar to gen_lowpart in emit-rtl.c. */
5351
5352 rtx
5353 gen_lowpart_if_possible (mode, x)
5354 enum machine_mode mode;
5355 register rtx x;
5356 {
5357 rtx result = gen_lowpart_common (mode, x);
5358
5359 if (result)
5360 return result;
5361 else if (GET_CODE (x) == MEM)
5362 {
5363 /* This is the only other case we handle. */
5364 register int offset = 0;
5365 rtx new;
5366
5367 #if WORDS_BIG_ENDIAN
5368 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5369 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5370 #endif
5371 #if BYTES_BIG_ENDIAN
5372 /* Adjust the address so that the address-after-the-data
5373 is unchanged. */
5374 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5375 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5376 #endif
5377 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
5378 if (! memory_address_p (mode, XEXP (new, 0)))
5379 return 0;
5380 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5381 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5382 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5383 return new;
5384 }
5385 else
5386 return 0;
5387 }
5388 \f
5389 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5390 branch. It will be zero if not.
5391
5392 In certain cases, this can cause us to add an equivalence. For example,
5393 if we are following the taken case of
5394 if (i == 2)
5395    we can add the fact that `i' and `2' are now equivalent.
5396
5397 In any case, we can record that this comparison was passed. If the same
5398 comparison is seen later, we will know its value. */
5399
5400 static void
5401 record_jump_equiv (insn, taken)
5402 rtx insn;
5403 int taken;
5404 {
5405 int cond_known_true;
5406 rtx op0, op1;
5407 enum machine_mode mode, mode0, mode1;
5408 int reversed_nonequality = 0;
5409 enum rtx_code code;
5410
5411 /* Ensure this is the right kind of insn. */
5412 if (! condjump_p (insn) || simplejump_p (insn))
5413 return;
5414
5415 /* See if this jump condition is known true or false. */
5416 if (taken)
5417 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5418 else
5419 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5420
5421 /* Get the type of comparison being done and the operands being compared.
5422 If we had to reverse a non-equality condition, record that fact so we
5423 know that it isn't valid for floating-point. */
5424 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5425 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5426 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5427
5428 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5429 if (! cond_known_true)
5430 {
5431 reversed_nonequality = (code != EQ && code != NE);
5432 code = reverse_condition (code);
5433 }
5434
5435 /* The mode is the mode of the non-constant. */
5436 mode = mode0;
5437 if (mode1 != VOIDmode)
5438 mode = mode1;
5439
5440 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5441 }
5442
5443 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5444 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
5445 Make any useful entries we can with that information. Called from
5446 above function and called recursively. */
5447
5448 static void
5449 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5450 enum rtx_code code;
5451 enum machine_mode mode;
5452 rtx op0, op1;
5453 int reversed_nonequality;
5454 {
5455 int op0_hash_code, op1_hash_code;
5456 int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5457 struct table_elt *op0_elt, *op1_elt;
5458
5459 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5460 we know that they are also equal in the smaller mode (this is also
5461 true for all smaller modes whether or not there is a SUBREG, but
5462    is not worth testing for with no SUBREG).  */
5463
5464 if (code == EQ && GET_CODE (op0) == SUBREG
5465 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
5466 {
5467 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5468 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5469
5470 record_jump_cond (code, mode, SUBREG_REG (op0),
5471 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5472 reversed_nonequality);
5473 }
5474
5475 if (code == EQ && GET_CODE (op1) == SUBREG
5476 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))))
5477 {
5478 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5479 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5480
5481 record_jump_cond (code, mode, SUBREG_REG (op1),
5482 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5483 reversed_nonequality);
5484 }
5485
5486 /* Similarly, if this is an NE comparison, and either is a SUBREG
5487 making a smaller mode, we know the whole thing is also NE. */
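/* Illustrative sketch (register numbers are hypothetical): if
   (subreg:QI (reg:SI 65) 0) is known unequal to (reg:QI 66), then
   (reg:SI 65) must be unequal to any SImode value whose lowpart is
   (reg:QI 66), because equal values have equal lowparts. */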
5488
5489 if (code == NE && GET_CODE (op0) == SUBREG
5490 && subreg_lowpart_p (op0)
5491 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
5492 {
5493 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5494 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5495
5496 record_jump_cond (code, mode, SUBREG_REG (op0),
5497 tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
5498 reversed_nonequality);
5499 }
5500
5501 if (code == NE && GET_CODE (op1) == SUBREG
5502 && subreg_lowpart_p (op1)
5503 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))))
5504 {
5505 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5506 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5507
5508 record_jump_cond (code, mode, SUBREG_REG (op1),
5509 tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
5510 reversed_nonequality);
5511 }
5512
5513 /* Hash both operands. */
5514
5515 do_not_record = 0;
5516 hash_arg_in_memory = 0;
5517 hash_arg_in_struct = 0;
5518 op0_hash_code = HASH (op0, mode);
5519 op0_in_memory = hash_arg_in_memory;
5520 op0_in_struct = hash_arg_in_struct;
5521
5522 if (do_not_record)
5523 return;
5524
5525 do_not_record = 0;
5526 hash_arg_in_memory = 0;
5527 hash_arg_in_struct = 0;
5528 op1_hash_code = HASH (op1, mode);
5529 op1_in_memory = hash_arg_in_memory;
5530 op1_in_struct = hash_arg_in_struct;
5531
5532 if (do_not_record)
5533 return;
5534
5535 /* Look up both operands. */
5536 op0_elt = lookup (op0, op0_hash_code, mode);
5537 op1_elt = lookup (op1, op1_hash_code, mode);
5538
5539 /* If we aren't setting two things equal, all we can do is save this
5540 comparison. Similarly if this is floating-point. In the latter
5541 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
5542 If we record the equality, we might inadvertently delete code
5543 whose intent was to change -0 to +0. */
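/* As a concrete example of the hazard: in `if (x == 0.0) x = 0.0;'
   the store is not dead when x is -0.0. Recording x == 0.0 at the
   test and then deleting the assignment as a no-op would leave the
   sign of x unchanged. */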
5544
5545 if (code != EQ || GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
5546 {
5547 /* If we reversed a floating-point comparison, if OP0 is not a
5548 register, or if OP1 is neither a register nor a constant, we can't
5549 do anything. */
5550
5551 if (GET_CODE (op1) != REG)
5552 op1 = equiv_constant (op1);
5553
5554 if ((reversed_nonequality && GET_MODE_CLASS (mode) != MODE_INT)
5555 || GET_CODE (op0) != REG || op1 == 0)
5556 return;
5557
5558 /* Put OP0 in the hash table if it isn't already. This gives it a
5559 new quantity number. */
5560 if (op0_elt == 0)
5561 {
5562 if (insert_regs (op0, NULL_PTR, 0))
5563 {
5564 rehash_using_reg (op0);
5565 op0_hash_code = HASH (op0, mode);
5566 }
5567
5568 op0_elt = insert (op0, NULL_PTR, op0_hash_code, mode);
5569 op0_elt->in_memory = op0_in_memory;
5570 op0_elt->in_struct = op0_in_struct;
5571 }
5572
5573 qty_comparison_code[reg_qty[REGNO (op0)]] = code;
5574 if (GET_CODE (op1) == REG)
5575 {
5576 /* Put OP1 in the hash table so it gets a new quantity number. */
5577 if (op1_elt == 0)
5578 {
5579 if (insert_regs (op1, NULL_PTR, 0))
5580 {
5581 rehash_using_reg (op1);
5582 op1_hash_code = HASH (op1, mode);
5583 }
5584
5585 op1_elt = insert (op1, NULL_PTR, op1_hash_code, mode);
5586 op1_elt->in_memory = op1_in_memory;
5587 op1_elt->in_struct = op1_in_struct;
5588 }
5589
5590 qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
5591 qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
5592 }
5593 else
5594 {
5595 qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
5596 qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
5597 }
5598
5599 return;
5600 }
5601
5602 /* If both operands are already in the table, merge their classes. Save this class for
5603 `cse_set_around_loop'. */
5604 if (op0_elt && op1_elt)
5605 {
5606 merge_equiv_classes (op0_elt, op1_elt);
5607 last_jump_equiv_class = op0_elt;
5608 }
5609
5610 /* For whichever side doesn't have an equivalence, make one. */
5611 if (op0_elt == 0)
5612 {
5613 if (insert_regs (op0, op1_elt, 0))
5614 {
5615 rehash_using_reg (op0);
5616 op0_hash_code = HASH (op0, mode);
5617 }
5618
5619 op0_elt = insert (op0, op1_elt, op0_hash_code, mode);
5620 op0_elt->in_memory = op0_in_memory;
5621 op0_elt->in_struct = op0_in_struct;
5622 last_jump_equiv_class = op0_elt;
5623 }
5624
5625 if (op1_elt == 0)
5626 {
5627 if (insert_regs (op1, op0_elt, 0))
5628 {
5629 rehash_using_reg (op1);
5630 op1_hash_code = HASH (op1, mode);
5631 }
5632
5633 op1_elt = insert (op1, op0_elt, op1_hash_code, mode);
5634 op1_elt->in_memory = op1_in_memory;
5635 op1_elt->in_struct = op1_in_struct;
5636 last_jump_equiv_class = op1_elt;
5637 }
5638 }
5639 \f
5640 /* CSE processing for one instruction.
5641 First simplify sources and addresses of all assignments
5642 in the instruction, using previously-computed equivalent values.
5643 Then install the new sources and destinations in the table
5644 of available values.
5645
5646 If IN_LIBCALL_BLOCK is nonzero, don't record any equivalence made in
5647 the insn. */
5648
5649 /* Data on one SET contained in the instruction. */
5650
5651 struct set
5652 {
5653 /* The SET rtx itself. */
5654 rtx rtl;
5655 /* The SET_SRC of the rtx (the original value, if it is changing). */
5656 rtx src;
5657 /* The hash-table element for the SET_SRC of the SET. */
5658 struct table_elt *src_elt;
5659 /* Hash code for the SET_SRC. */
5660 int src_hash_code;
5661 /* Hash code for the SET_DEST. */
5662 int dest_hash_code;
5663 /* The SET_DEST, with SUBREG, etc., stripped. */
5664 rtx inner_dest;
5665 /* Place where the pointer to the INNER_DEST was found. */
5666 rtx *inner_dest_loc;
5667 /* Nonzero if the SET_SRC is in memory. */
5668 char src_in_memory;
5669 /* Nonzero if the SET_SRC is in a structure. */
5670 char src_in_struct;
5671 /* Nonzero if the SET_SRC contains something
5672 whose value cannot be predicted and understood. */
5673 char src_volatile;
5674 /* Original machine mode, in case it becomes a CONST_INT. */
5675 enum machine_mode mode;
5676 /* A constant equivalent for SET_SRC, if any. */
5677 rtx src_const;
5678 /* Hash code of constant equivalent for SET_SRC. */
5679 int src_const_hash_code;
5680 /* Table entry for constant equivalent for SET_SRC, if any. */
5681 struct table_elt *src_const_elt;
5682 };
5683
5684 static void
5685 cse_insn (insn, in_libcall_block)
5686 rtx insn;
5687 int in_libcall_block;
5688 {
5689 register rtx x = PATTERN (insn);
5690 rtx tem;
5691 register int i;
5692 register int n_sets = 0;
5693
5694 /* Records what this insn does to set CC0. */
5695 rtx this_insn_cc0 = 0;
5696 enum machine_mode this_insn_cc0_mode;
5697 struct write_data writes_memory;
5698 static struct write_data init = {0, 0, 0, 0};
5699
5700 rtx src_eqv = 0;
5701 struct table_elt *src_eqv_elt = 0;
5702 int src_eqv_volatile;
5703 int src_eqv_in_memory;
5704 int src_eqv_in_struct;
5705 int src_eqv_hash_code;
5706
5707 struct set *sets;
5708
5709 this_insn = insn;
5710 writes_memory = init;
5711
5712 /* Find all the SETs and CLOBBERs in this instruction.
5713 Record all the SETs in the array `sets' and count them.
5714 Also determine whether there is a CLOBBER that invalidates
5715 all memory references, or all references at varying addresses. */
5716
5717 if (GET_CODE (x) == SET)
5718 {
5719 sets = (struct set *) alloca (sizeof (struct set));
5720 sets[0].rtl = x;
5721
5722 /* Ignore SETs that are unconditional jumps.
5723 They never need cse processing, so this does not hurt.
5724 The reason is not efficiency but rather
5725 so that we can test at the end for instructions
5726 that have been simplified to unconditional jumps
5727 and not be misled by unchanged instructions
5728 that were unconditional jumps to begin with. */
5729 if (SET_DEST (x) == pc_rtx
5730 && GET_CODE (SET_SRC (x)) == LABEL_REF)
5731 ;
5732
5733 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
5734 The hard function value register is used only once, to copy to
5735 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
5736 Ensure we invalidate the destination register. On the 80386 no
5737 other code would invalidate it since it is a fixed_reg.
5738 We need not check the return of apply_change_group; see canon_reg. */
5739
5740 else if (GET_CODE (SET_SRC (x)) == CALL)
5741 {
5742 canon_reg (SET_SRC (x), insn);
5743 apply_change_group ();
5744 fold_rtx (SET_SRC (x), insn);
5745 invalidate (SET_DEST (x));
5746 }
5747 else
5748 n_sets = 1;
5749 }
5750 else if (GET_CODE (x) == PARALLEL)
5751 {
5752 register int lim = XVECLEN (x, 0);
5753
5754 sets = (struct set *) alloca (lim * sizeof (struct set));
5755
5756 /* Find all regs explicitly clobbered in this insn,
5757 and ensure they are not replaced with any other regs
5758 elsewhere in this insn.
5759 When a reg that is clobbered is also used for input,
5760 we should presume that that is for a reason,
5761 and we should not substitute some other register
5762 which is not supposed to be clobbered.
5763 Therefore, this loop cannot be merged into the one below
5764 because a CALL may precede a CLOBBER and refer to the
5765 value clobbered. We must not let a canonicalization do
5766 anything in that case. */
5767 for (i = 0; i < lim; i++)
5768 {
5769 register rtx y = XVECEXP (x, 0, i);
5770 if (GET_CODE (y) == CLOBBER
5771 && (GET_CODE (XEXP (y, 0)) == REG
5772 || GET_CODE (XEXP (y, 0)) == SUBREG))
5773 invalidate (XEXP (y, 0));
5774 }
5775
5776 for (i = 0; i < lim; i++)
5777 {
5778 register rtx y = XVECEXP (x, 0, i);
5779 if (GET_CODE (y) == SET)
5780 {
5781 /* As above, we ignore unconditional jumps and call-insns and
5782 ignore the result of apply_change_group. */
5783 if (GET_CODE (SET_SRC (y)) == CALL)
5784 {
5785 canon_reg (SET_SRC (y), insn);
5786 apply_change_group ();
5787 fold_rtx (SET_SRC (y), insn);
5788 invalidate (SET_DEST (y));
5789 }
5790 else if (SET_DEST (y) == pc_rtx
5791 && GET_CODE (SET_SRC (y)) == LABEL_REF)
5792 ;
5793 else
5794 sets[n_sets++].rtl = y;
5795 }
5796 else if (GET_CODE (y) == CLOBBER)
5797 {
5798 /* If we clobber memory, take note of that,
5799 and canon the address.
5800 This does nothing when a register is clobbered
5801 because we have already invalidated the reg. */
5802 if (GET_CODE (XEXP (y, 0)) == MEM)
5803 {
5804 canon_reg (XEXP (y, 0), NULL_RTX);
5805 note_mem_written (XEXP (y, 0), &writes_memory);
5806 }
5807 }
5808 else if (GET_CODE (y) == USE
5809 && ! (GET_CODE (XEXP (y, 0)) == REG
5810 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
5811 canon_reg (y, NULL_RTX);
5812 else if (GET_CODE (y) == CALL)
5813 {
5814 /* The result of apply_change_group can be ignored; see
5815 canon_reg. */
5816 canon_reg (y, insn);
5817 apply_change_group ();
5818 fold_rtx (y, insn);
5819 }
5820 }
5821 }
5822 else if (GET_CODE (x) == CLOBBER)
5823 {
5824 if (GET_CODE (XEXP (x, 0)) == MEM)
5825 {
5826 canon_reg (XEXP (x, 0), NULL_RTX);
5827 note_mem_written (XEXP (x, 0), &writes_memory);
5828 }
5829 }
5830
5831 /* Canonicalize a USE of a pseudo register or memory location. */
5832 else if (GET_CODE (x) == USE
5833 && ! (GET_CODE (XEXP (x, 0)) == REG
5834 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
5835 canon_reg (XEXP (x, 0), NULL_RTX);
5836 else if (GET_CODE (x) == CALL)
5837 {
5838 /* The result of apply_change_group can be ignored; see canon_reg. */
5839 canon_reg (x, insn);
5840 apply_change_group ();
5841 fold_rtx (x, insn);
5842 }
5843
5844 if (n_sets == 1 && REG_NOTES (insn) != 0)
5845 {
5846 /* Store the equivalent value in SRC_EQV, if different. */
5847 rtx tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5848
5849 if (tem && ! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
5850 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
5851 }
5852
5853 /* Canonicalize sources and addresses of destinations.
5854 We do this in a separate pass to avoid problems when a MATCH_DUP is
5855 present in the insn pattern. In that case, we want to ensure that
5856 we don't break the duplicate nature of the pattern. So we will replace
5857 both operands at the same time. Otherwise, we would fail to find an
5858 equivalent substitution in the loop calling validate_change below.
5859
5860 We used to suppress canonicalization of DEST if it appears in SRC,
5861 but we don't do this any more. */
5862
5863 for (i = 0; i < n_sets; i++)
5864 {
5865 rtx dest = SET_DEST (sets[i].rtl);
5866 rtx src = SET_SRC (sets[i].rtl);
5867 rtx new = canon_reg (src, insn);
5868
5869 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
5870 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
5871 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
5872 || insn_n_dups[recog_memoized (insn)] > 0)
5873 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5874 else
5875 SET_SRC (sets[i].rtl) = new;
5876
5877 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
5878 {
5879 validate_change (insn, &XEXP (dest, 1),
5880 canon_reg (XEXP (dest, 1), insn), 1);
5881 validate_change (insn, &XEXP (dest, 2),
5882 canon_reg (XEXP (dest, 2), insn), 1);
5883 }
5884
5885 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
5886 || GET_CODE (dest) == ZERO_EXTRACT
5887 || GET_CODE (dest) == SIGN_EXTRACT)
5888 dest = XEXP (dest, 0);
5889
5890 if (GET_CODE (dest) == MEM)
5891 canon_reg (dest, insn);
5892 }
5893
5894 /* Now that we have done all the replacements, we can apply the change
5895 group and see if they all work. Note that this will cause some
5896 canonicalizations that would have worked individually not to be applied
5897 because some other canonicalization didn't work, but this should not
5898 occur often.
5899
5900 The result of apply_change_group can be ignored; see canon_reg. */
5901
5902 apply_change_group ();
5903
5904 /* Set sets[i].src_elt to the class each source belongs to.
5905 Detect assignments from or to volatile things
5906 and set sets[i].rtl to zero so they will be ignored
5907 in the rest of this function.
5908
5909 Nothing in this loop changes the hash table or the register chains. */
5910
5911 for (i = 0; i < n_sets; i++)
5912 {
5913 register rtx src, dest;
5914 register rtx src_folded;
5915 register struct table_elt *elt = 0, *p;
5916 enum machine_mode mode;
5917 rtx src_eqv_here;
5918 rtx src_const = 0;
5919 rtx src_related = 0;
5920 struct table_elt *src_const_elt = 0;
5921 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
5922 int src_related_cost = 10000, src_elt_cost = 10000;
5923 /* Set non-zero if we need to call force_const_mem on the
5924 contents of src_folded before using it. */
5925 int src_folded_force_flag = 0;
5926
5927 dest = SET_DEST (sets[i].rtl);
5928 src = SET_SRC (sets[i].rtl);
5929
5930 /* If SRC is a constant that has no machine mode,
5931 hash it with the destination's machine mode.
5932 This way we can keep different modes separate. */
5933
5934 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5935 sets[i].mode = mode;
5936
5937 if (src_eqv)
5938 {
5939 enum machine_mode eqvmode = mode;
5940 if (GET_CODE (dest) == STRICT_LOW_PART)
5941 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5942 do_not_record = 0;
5943 hash_arg_in_memory = 0;
5944 hash_arg_in_struct = 0;
5945 src_eqv = fold_rtx (src_eqv, insn);
5946 src_eqv_hash_code = HASH (src_eqv, eqvmode);
5947
5948 /* Find the equivalence class for the equivalent expression. */
5949
5950 if (!do_not_record)
5951 src_eqv_elt = lookup (src_eqv, src_eqv_hash_code, eqvmode);
5952
5953 src_eqv_volatile = do_not_record;
5954 src_eqv_in_memory = hash_arg_in_memory;
5955 src_eqv_in_struct = hash_arg_in_struct;
5956 }
5957
5958 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5959 value of the INNER register, not the destination. So it is not
5960 a legal substitution for the source. But save it for later. */
5961 if (GET_CODE (dest) == STRICT_LOW_PART)
5962 src_eqv_here = 0;
5963 else
5964 src_eqv_here = src_eqv;
5965
5966 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5967 simplified result, which may not necessarily be valid. */
5968 src_folded = fold_rtx (src, insn);
5969
5970 /* If storing a constant in a bitfield, pre-truncate the constant
5971 so we will be able to record it later. */
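/* A numeric sketch (the values are illustrative): storing the
   CONST_INT 0x13 into a 4-bit ZERO_EXTRACT really stores only 0x3,
   so we record 0x13 & ((1 << 4) - 1), i.e. 0x3, as the folded
   source instead. */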
5972 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5973 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5974 {
5975 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5976
5977 if (GET_CODE (src) == CONST_INT
5978 && GET_CODE (width) == CONST_INT
5979 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5980 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5981 src_folded
5982 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5983 << INTVAL (width)) - 1));
5984 }
5985
5986 /* Compute SRC's hash code, and also notice if it
5987 should not be recorded at all. In that case,
5988 prevent any further processing of this assignment. */
5989 do_not_record = 0;
5990 hash_arg_in_memory = 0;
5991 hash_arg_in_struct = 0;
5992
5993 sets[i].src = src;
5994 sets[i].src_hash_code = HASH (src, mode);
5995 sets[i].src_volatile = do_not_record;
5996 sets[i].src_in_memory = hash_arg_in_memory;
5997 sets[i].src_in_struct = hash_arg_in_struct;
5998
5999 #if 0
6000 /* It is no longer clear why we used to do this, but it doesn't
6001 appear to still be needed. So let's try without it since this
6002 code hurts cse'ing widened ops. */
6003 /* If source is a perverse subreg (such as QI treated as an SI),
6004 treat it as volatile. It may do the work of an SI in one context
6005 where the extra bits are not being used, but cannot replace an SI
6006 in general. */
6007 if (GET_CODE (src) == SUBREG
6008 && (GET_MODE_SIZE (GET_MODE (src))
6009 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6010 sets[i].src_volatile = 1;
6011 #endif
6012
6013 /* Locate all possible equivalent forms for SRC. Try to replace
6014 SRC in the insn with each cheaper equivalent.
6015
6016 We have the following types of equivalents: SRC itself, a folded
6017 version, a value given in a REG_EQUAL note, or a value related
6018 to a constant.
6019
6020 Each of these equivalents may be part of an additional class
6021 of equivalents (if more than one is in the table, they must be in
6022 the same class; we check for this).
6023
6024 If the source is volatile, we don't do any table lookups.
6025
6026 We note any constant equivalent for possible later use in a
6027 REG_NOTE. */
6028
6029 if (!sets[i].src_volatile)
6030 elt = lookup (src, sets[i].src_hash_code, mode);
6031
6032 sets[i].src_elt = elt;
6033
6034 if (elt && src_eqv_here && src_eqv_elt)
6035 {
6036 if (elt->first_same_value != src_eqv_elt->first_same_value)
6037 {
6038 /* The REG_EQUAL is indicating that two formerly distinct
6039 classes are now equivalent. So merge them. */
6040 merge_equiv_classes (elt, src_eqv_elt);
6041 src_eqv_hash_code = HASH (src_eqv, elt->mode);
6042 src_eqv_elt = lookup (src_eqv, src_eqv_hash_code, elt->mode);
6043 }
6044
6045 src_eqv_here = 0;
6046 }
6047
6048 else if (src_eqv_elt)
6049 elt = src_eqv_elt;
6050
6051 /* Try to find a constant somewhere and record it in `src_const'.
6052 Record its table element, if any, in `src_const_elt'. Look in
6053 any known equivalences first. (If the constant is not in the
6054 table, also set `sets[i].src_const_hash_code'). */
6055 if (elt)
6056 for (p = elt->first_same_value; p; p = p->next_same_value)
6057 if (p->is_const)
6058 {
6059 src_const = p->exp;
6060 src_const_elt = elt;
6061 break;
6062 }
6063
6064 if (src_const == 0
6065 && (CONSTANT_P (src_folded)
6066 /* Consider (minus (label_ref L1) (label_ref L2)) as
6067 "constant" here so we will record it. This allows us
6068 to fold switch statements when an ADDR_DIFF_VEC is used. */
6069 || (GET_CODE (src_folded) == MINUS
6070 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6071 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6072 src_const = src_folded, src_const_elt = elt;
6073 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6074 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6075
6076 /* If we don't know if the constant is in the table, get its
6077 hash code and look it up. */
6078 if (src_const && src_const_elt == 0)
6079 {
6080 sets[i].src_const_hash_code = HASH (src_const, mode);
6081 src_const_elt = lookup (src_const, sets[i].src_const_hash_code,
6082 mode);
6083 }
6084
6085 sets[i].src_const = src_const;
6086 sets[i].src_const_elt = src_const_elt;
6087
6088 /* If the constant and our source are both in the table, mark them as
6089 equivalent. Otherwise, if a constant is in the table but the source
6090 isn't, set ELT to it. */
6091 if (src_const_elt && elt
6092 && src_const_elt->first_same_value != elt->first_same_value)
6093 merge_equiv_classes (elt, src_const_elt);
6094 else if (src_const_elt && elt == 0)
6095 elt = src_const_elt;
6096
6097 /* See if there is a register linearly related to a constant
6098 equivalent of SRC. */
6099 if (src_const
6100 && (GET_CODE (src_const) == CONST
6101 || (src_const_elt && src_const_elt->related_value != 0)))
6102 {
6103 src_related = use_related_value (src_const, src_const_elt);
6104 if (src_related)
6105 {
6106 struct table_elt *src_related_elt
6107 = lookup (src_related, HASH (src_related, mode), mode);
6108 if (src_related_elt && elt)
6109 {
6110 if (elt->first_same_value
6111 != src_related_elt->first_same_value)
6112 /* This can occur when we previously saw a CONST
6113 involving a SYMBOL_REF and then see the SYMBOL_REF
6114 twice. Merge the involved classes. */
6115 merge_equiv_classes (elt, src_related_elt);
6116
6117 src_related = 0;
6118 src_related_elt = 0;
6119 }
6120 else if (src_related_elt && elt == 0)
6121 elt = src_related_elt;
6122 }
6123 }
6124
6125 /* See if we have a CONST_INT that is already in a register in a
6126 wider mode. */
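/* Sketch (modes and register numbers are illustrative): if we need
   (const_int 7) in HImode and (reg:SI 70) is already known to hold 7,
   the HImode lowpart of (reg:SI 70) supplies the value; the loop
   below searches each wider integer mode for such a register. */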
6127
6128 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6129 && GET_MODE_CLASS (mode) == MODE_INT
6130 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6131 {
6132 enum machine_mode wider_mode;
6133
6134 for (wider_mode = GET_MODE_WIDER_MODE (mode);
6135 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6136 && src_related == 0;
6137 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6138 {
6139 struct table_elt *const_elt
6140 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6141
6142 if (const_elt == 0)
6143 continue;
6144
6145 for (const_elt = const_elt->first_same_value;
6146 const_elt; const_elt = const_elt->next_same_value)
6147 if (GET_CODE (const_elt->exp) == REG)
6148 {
6149 src_related = gen_lowpart_if_possible (mode,
6150 const_elt->exp);
6151 break;
6152 }
6153 }
6154 }
6155
6156 /* Another possibility is that we have an AND with a constant in
6157 a mode narrower than a word. If so, it might have been generated
6158 as part of an "if" which would narrow the AND. If we already
6159 have done the AND in a wider mode, we can use a SUBREG of that
6160 value. */
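/* Sketch (modes and register numbers are illustrative): suppose we
   need (and:QI (reg:QI 65) (const_int 15)) and (reg:QI 65) is the
   lowpart of (reg:SI 66). If the table already holds a register
   equivalent to (and:SI (reg:SI 66) (const_int 15)), its QImode
   lowpart has the desired value, since AND operates bitwise. */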
6161
6162 if (flag_expensive_optimizations && ! src_related
6163 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6164 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6165 {
6166 enum machine_mode tmode;
6167 rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
6168
6169 for (tmode = GET_MODE_WIDER_MODE (mode);
6170 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6171 tmode = GET_MODE_WIDER_MODE (tmode))
6172 {
6173 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6174 struct table_elt *larger_elt;
6175
6176 if (inner)
6177 {
6178 PUT_MODE (new_and, tmode);
6179 XEXP (new_and, 0) = inner;
6180 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6181 if (larger_elt == 0)
6182 continue;
6183
6184 for (larger_elt = larger_elt->first_same_value;
6185 larger_elt; larger_elt = larger_elt->next_same_value)
6186 if (GET_CODE (larger_elt->exp) == REG)
6187 {
6188 src_related
6189 = gen_lowpart_if_possible (mode, larger_elt->exp);
6190 break;
6191 }
6192
6193 if (src_related)
6194 break;
6195 }
6196 }
6197 }
6198
6199 if (src == src_folded)
6200 src_folded = 0;
6201
6202 /* At this point, ELT, if non-zero, points to a class of expressions
6203 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
6204 and SRC_RELATED, if non-zero, each contain additional equivalent
6205 expressions. Prune these latter expressions by deleting expressions
6206 already in the equivalence class.
6207
6208 Check for an equivalent identical to the destination. If found,
6209 this is the preferred equivalent since it will likely lead to
6210 elimination of the insn. Indicate this by placing it in
6211 `src_related'. */
6212
6213 if (elt) elt = elt->first_same_value;
6214 for (p = elt; p; p = p->next_same_value)
6215 {
6216 enum rtx_code code = GET_CODE (p->exp);
6217
6218 /* If the expression is not valid, ignore it. Then we do not
6219 have to check for validity below. In most cases, we can use
6220 `rtx_equal_p', since canonicalization has already been done. */
6221 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6222 continue;
6223
6224 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6225 src = 0;
6226 else if (src_folded && GET_CODE (src_folded) == code
6227 && rtx_equal_p (src_folded, p->exp))
6228 src_folded = 0;
6229 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6230 && rtx_equal_p (src_eqv_here, p->exp))
6231 src_eqv_here = 0;
6232 else if (src_related && GET_CODE (src_related) == code
6233 && rtx_equal_p (src_related, p->exp))
6234 src_related = 0;
6235
6236 /* If this is the same as the destination of the insn, we want
6237 to prefer it. Copy it to src_related; the code below will
6238 then give it a negative cost. */
6239 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6240 src_related = dest;
6241
6242 }
6243
6244 /* Find the cheapest valid equivalent, trying all the available
6245 possibilities. Prefer items not in the hash table to ones
6246 that are when they are equal cost. Note that we can never
6247 worsen an insn as the current contents will also succeed.
6248 If we find an equivalent identical to the destination, use it as best,
6249 since this insn will probably be eliminated in that case. */
6250 if (src)
6251 {
6252 if (rtx_equal_p (src, dest))
6253 src_cost = -1;
6254 else
6255 src_cost = COST (src);
6256 }
6257
6258 if (src_eqv_here)
6259 {
6260 if (rtx_equal_p (src_eqv_here, dest))
6261 src_eqv_cost = -1;
6262 else
6263 src_eqv_cost = COST (src_eqv_here);
6264 }
6265
6266 if (src_folded)
6267 {
6268 if (rtx_equal_p (src_folded, dest))
6269 src_folded_cost = -1;
6270 else
6271 src_folded_cost = COST (src_folded);
6272 }
6273
6274 if (src_related)
6275 {
6276 if (rtx_equal_p (src_related, dest))
6277 src_related_cost = -1;
6278 else
6279 src_related_cost = COST (src_related);
6280 }
6281
6282 /* If this was an indirect jump insn, a known label will really be
6283 cheaper even though it looks more expensive. */
6284 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6285 src_folded = src_const, src_folded_cost = -1;
6286
6287 /* Terminate loop when replacement made. This must terminate since
6288 the current contents will be tested and will always be valid. */
6289 while (1)
6290 {
6291 rtx trial;
6292
6293 /* Skip invalid entries. */
6294 while (elt && GET_CODE (elt->exp) != REG
6295 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6296 elt = elt->next_same_value;
6297
6298 if (elt) src_elt_cost = elt->cost;
6299
6300 /* Find cheapest and skip it for the next time. For items
6301 of equal cost, use this order:
6302 src_folded, src, src_eqv, src_related and hash table entry. */
6303 if (src_folded_cost <= src_cost
6304 && src_folded_cost <= src_eqv_cost
6305 && src_folded_cost <= src_related_cost
6306 && src_folded_cost <= src_elt_cost)
6307 {
6308 trial = src_folded, src_folded_cost = 10000;
6309 if (src_folded_force_flag)
6310 trial = force_const_mem (mode, trial);
6311 }
6312 else if (src_cost <= src_eqv_cost
6313 && src_cost <= src_related_cost
6314 && src_cost <= src_elt_cost)
6315 trial = src, src_cost = 10000;
6316 else if (src_eqv_cost <= src_related_cost
6317 && src_eqv_cost <= src_elt_cost)
6318 trial = src_eqv_here, src_eqv_cost = 10000;
6319 else if (src_related_cost <= src_elt_cost)
6320 trial = src_related, src_related_cost = 10000;
6321 else
6322 {
6323 trial = copy_rtx (elt->exp);
6324 elt = elt->next_same_value;
6325 src_elt_cost = 10000;
6326 }
6327
6328 /* We don't normally have an insn matching (set (pc) (pc)), so
6329 check for this separately here. We will delete such an
6330 insn below.
6331
6332 Tablejump insns contain a USE of the table, so simply replacing
6333 the operand with the constant won't match. This is simply an
6334 unconditional branch, however, and is therefore valid. Just
6335 insert the substitution here and we will delete and re-emit
6336 the insn later. */
6337
6338 if (n_sets == 1 && dest == pc_rtx
6339 && (trial == pc_rtx
6340 || (GET_CODE (trial) == LABEL_REF
6341 && ! condjump_p (insn))))
6342 {
6343 /* If TRIAL is a label in front of a jump table, we are
6344 really falling through the switch (this is how casesi
6345 insns work), so we must branch around the table. */
6346 if (GET_CODE (trial) == CODE_LABEL
6347 && NEXT_INSN (trial) != 0
6348 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6349 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6350 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6351
6352 trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
6353
6354 SET_SRC (sets[i].rtl) = trial;
6355 break;
6356 }
6357
6358 /* Look for a substitution that makes a valid insn. */
6359 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6360 {
6361 /* The result of apply_change_group can be ignored; see
6362 canon_reg. */
6363
6364 validate_change (insn, &SET_SRC (sets[i].rtl),
6365 canon_reg (SET_SRC (sets[i].rtl), insn),
6366 1);
6367 apply_change_group ();
6368 break;
6369 }
6370
6371 /* If we previously found constant pool entries for
6372 constants and this is a constant, try making a
6373 pool entry. Put it in src_folded unless we already have done
6374 this, since that is where it likely came from. */
6375
6376 else if (constant_pool_entries_cost
6377 && CONSTANT_P (trial)
6378 && (src_folded == 0 || GET_CODE (src_folded) != MEM)
6379 && GET_MODE_CLASS (mode) != MODE_CC)
6380 {
6381 src_folded_force_flag = 1;
6382 src_folded = trial;
6383 src_folded_cost = constant_pool_entries_cost;
6384 }
6385 }
6386
6387 src = SET_SRC (sets[i].rtl);
6388
6389 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6390 However, there is an important exception: If both are registers
6391 that are not the head of their equivalence class, replace SET_SRC
6392 with the head of the class. If we do not do this, we will have
6393 both registers live over a portion of the basic block. This way,
6394 their lifetimes will likely abut instead of overlapping. */
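/* Sketch (register numbers are hypothetical): if this insn has become
   (set (reg 70) (reg 70)) but reg 69 heads the quantity's chain, we
   rewrite the source to (reg 69). The insn is no longer a no-op, but
   reg 70 now dies here instead of staying live alongside reg 69 for
   the rest of the block. */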
6395 if (GET_CODE (dest) == REG
6396 && REGNO_QTY_VALID_P (REGNO (dest))
6397 && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
6398 && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
6399 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
6400 /* Don't do this if the original insn had a hard reg as
6401 SET_SRC. */
6402 && (GET_CODE (sets[i].src) != REG
6403 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
6404 /* We can't call canon_reg here because it won't do anything if
6405 SRC is a hard register. */
6406 {
6407 int first = qty_first_reg[reg_qty[REGNO (src)]];
6408
6409 src = SET_SRC (sets[i].rtl)
6410 = first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
6411 : gen_rtx (REG, GET_MODE (src), first);
6412
6413 /* If we had a constant that is cheaper than what we are now
6414 setting SRC to, use that constant. We ignored it when we
6415 thought we could make this into a no-op. */
6416 if (src_const && COST (src_const) < COST (src)
6417 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const, 0))
6418 src = src_const;
6419 }
6420
6421 /* If we made a change, recompute SRC values. */
6422 if (src != sets[i].src)
6423 {
6424 do_not_record = 0;
6425 hash_arg_in_memory = 0;
6426 hash_arg_in_struct = 0;
6427 sets[i].src = src;
6428 sets[i].src_hash_code = HASH (src, mode);
6429 sets[i].src_volatile = do_not_record;
6430 sets[i].src_in_memory = hash_arg_in_memory;
6431 sets[i].src_in_struct = hash_arg_in_struct;
6432 sets[i].src_elt = lookup (src, sets[i].src_hash_code, mode);
6433 }
6434
6435 /* If this is a single SET, we are setting a register, and we have an
6436 equivalent constant, we want to add a REG_NOTE. We don't want
6437 to write a REG_EQUAL note for a constant pseudo since verifying that
6438 that pseudo hasn't been eliminated is a pain. Such a note also
6439 won't help anything. */
6440 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
6441 && GET_CODE (src_const) != REG)
6442 {
6443 rtx tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6444
6445 /* Record the actual constant value in a REG_EQUAL note, making
6446 a new one if one does not already exist. */
6447 if (tem)
6448 XEXP (tem, 0) = src_const;
6449 else
6450 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
6451 src_const, REG_NOTES (insn));
6452
6453 /* If storing a constant value in a register that
6454 previously held the constant value 0,
6455 record this fact with a REG_WAS_0 note on this insn.
6456
6457 Note that the *register* is required to have previously held 0,
6458 not just any register in the quantity, and we must point to the
6459 insn that set that register to zero.
6460
6461 Rather than track each register individually, we just see if
6462 the last set for this quantity was for this register. */
6463
6464 if (REGNO_QTY_VALID_P (REGNO (dest))
6465 && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
6466 {
6467 /* See if we previously had a REG_WAS_0 note. */
6468 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6469 rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
6470
6471 if ((tem = single_set (const_insn)) != 0
6472 && rtx_equal_p (SET_DEST (tem), dest))
6473 {
6474 if (note)
6475 XEXP (note, 0) = const_insn;
6476 else
6477 REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
6478 const_insn, REG_NOTES (insn));
6479 }
6480 }
6481 }
6482
6483 /* Now deal with the destination. */
6484 do_not_record = 0;
6485 sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
6486
6487 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
6488 to the MEM or REG within it. */
6489 while (GET_CODE (dest) == SIGN_EXTRACT
6490 || GET_CODE (dest) == ZERO_EXTRACT
6491 || GET_CODE (dest) == SUBREG
6492 || GET_CODE (dest) == STRICT_LOW_PART)
6493 {
6494 sets[i].inner_dest_loc = &XEXP (dest, 0);
6495 dest = XEXP (dest, 0);
6496 }
6497
6498 sets[i].inner_dest = dest;
6499
6500 if (GET_CODE (dest) == MEM)
6501 {
6502 dest = fold_rtx (dest, insn);
6503
6504 /* Decide whether we invalidate everything in memory,
6505 or just things at non-fixed places.
6506 Writing a large aggregate must invalidate everything
6507 because we don't know how long it is. */
6508 note_mem_written (dest, &writes_memory);
6509 }
6510
6511 /* Compute the hash code of the destination now,
6512 before the effects of this instruction are recorded,
6513 since the register values used in the address computation
6514 are those before this instruction. */
6515 sets[i].dest_hash_code = HASH (dest, mode);
6516
6517 /* Don't enter a bit-field in the hash table
6518 because the value in it after the store
6519 may not equal what was stored, due to truncation. */
6520
6521 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6522 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6523 {
6524 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6525
6526 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
6527 && GET_CODE (width) == CONST_INT
6528 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6529 && ! (INTVAL (src_const)
6530 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6531 /* Exception: if the value is constant,
6532 and it won't be truncated, record it. */
6533 ;
6534 else
6535 {
6536 /* This is chosen so that the destination will be invalidated
6537 but no new value will be recorded.
6538 We must invalidate because sometimes constant
6539 values can be recorded for bitfields. */
6540 sets[i].src_elt = 0;
6541 sets[i].src_volatile = 1;
6542 src_eqv = 0;
6543 src_eqv_elt = 0;
6544 }
6545 }
6546
6547 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
6548 the insn. */
6549 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
6550 {
6551 PUT_CODE (insn, NOTE);
6552 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6553 NOTE_SOURCE_FILE (insn) = 0;
6554 cse_jumps_altered = 1;
6555 /* One less use of the label this insn used to jump to. */
6556 --LABEL_NUSES (JUMP_LABEL (insn));
6557 /* No more processing for this set. */
6558 sets[i].rtl = 0;
6559 }
6560
6561 /* If this SET is now setting PC to a label, we know it used to
6562 be a conditional or computed branch. So we see if we can follow
6563 it. If it was a computed branch, delete it and re-emit. */
6564 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
6565 {
6566 rtx p;
6567
6568 /* If this is not in the format for a simple branch and
6569 this is the only SET in it, re-emit it. */
6570 if (! simplejump_p (insn) && n_sets == 1)
6571 {
6572 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
6573 JUMP_LABEL (new) = XEXP (src, 0);
6574 LABEL_NUSES (XEXP (src, 0))++;
6575 delete_insn (insn);
6576 insn = new;
6577 }
6578
6579 /* Now that we've converted this jump to an unconditional jump,
6580 there is dead code after it. Delete the dead code until we
6581 reach a BARRIER, the end of the function, or a label. Do
6582 not delete NOTEs except for NOTE_INSN_DELETED since later
6583 phases assume these notes are retained. */
6584
6585 p = insn;
6586
6587 while (NEXT_INSN (p) != 0
6588 && GET_CODE (NEXT_INSN (p)) != BARRIER
6589 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
6590 {
6591 if (GET_CODE (NEXT_INSN (p)) != NOTE
6592 || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
6593 delete_insn (NEXT_INSN (p));
6594 else
6595 p = NEXT_INSN (p);
6596 }
6597
6598 /* If we don't have a BARRIER immediately after INSN, put one there.
6599 Much code assumes that there are no NOTEs between a JUMP_INSN and
6600 BARRIER. */
6601
6602 if (NEXT_INSN (insn) == 0
6603 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
6604 emit_barrier_after (insn);
6605
6606 /* We might have two BARRIERs separated by notes. Delete the second
6607 one if so. */
6608
6609 if (p != insn && NEXT_INSN (p) != 0
6610 && GET_CODE (NEXT_INSN (p)) == BARRIER)
6611 delete_insn (NEXT_INSN (p));
6612
6613 cse_jumps_altered = 1;
6614 sets[i].rtl = 0;
6615 }
6616
6617 /* If destination is volatile, invalidate it and then do no further
6618 processing for this assignment. */
6619
6620 else if (do_not_record)
6621 {
6622 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6623 || GET_CODE (dest) == MEM)
6624 invalidate (dest);
6625 sets[i].rtl = 0;
6626 }
6627
6628 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
6629 sets[i].dest_hash_code = HASH (SET_DEST (sets[i].rtl), mode);
6630
6631 #ifdef HAVE_cc0
6632 /* If setting CC0, record what it was set to, or a constant, if it
6633 is equivalent to a constant. If it is being set to a floating-point
6634 value, make a COMPARE with the appropriate constant of 0. If we
6635 don't do this, later code can interpret this as a test against
6636 const0_rtx, which can cause problems if we try to put it into an
6637 insn as a floating-point operand. */
6638 if (dest == cc0_rtx)
6639 {
6640 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
6641 this_insn_cc0_mode = mode;
6642 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
6643 this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
6644 CONST0_RTX (mode));
6645 }
6646 #endif
6647 }
6648
6649 /* Now enter all non-volatile source expressions in the hash table
6650 if they are not already present.
6651 Record their equivalence classes in src_elt.
6652 This way we can insert the corresponding destinations into
6653 the same classes even if the actual sources are no longer in them
6654 (having been invalidated). */
6655
6656 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
6657 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
6658 {
6659 register struct table_elt *elt;
6660 register struct table_elt *classp = sets[0].src_elt;
6661 rtx dest = SET_DEST (sets[0].rtl);
6662 enum machine_mode eqvmode = GET_MODE (dest);
6663
6664 if (GET_CODE (dest) == STRICT_LOW_PART)
6665 {
6666 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6667 classp = 0;
6668 }
6669 if (insert_regs (src_eqv, classp, 0))
6670 src_eqv_hash_code = HASH (src_eqv, eqvmode);
6671 elt = insert (src_eqv, classp, src_eqv_hash_code, eqvmode);
6672 elt->in_memory = src_eqv_in_memory;
6673 elt->in_struct = src_eqv_in_struct;
6674 src_eqv_elt = elt;
6675 }
6676
6677 for (i = 0; i < n_sets; i++)
6678 if (sets[i].rtl && ! sets[i].src_volatile
6679 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
6680 {
6681 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
6682 {
6683 /* REG_EQUAL in setting a STRICT_LOW_PART
6684 gives an equivalent for the entire destination register,
6685 not just for the subreg being stored in now.
6686 This is a more interesting equivalence, so we arrange later
6687 to treat the entire reg as the destination. */
6688 sets[i].src_elt = src_eqv_elt;
6689 sets[i].src_hash_code = src_eqv_hash_code;
6690 }
6691 else
6692 {
6693 /* Insert source and constant equivalent into hash table, if not
6694 already present. */
6695 register struct table_elt *classp = src_eqv_elt;
6696 register rtx src = sets[i].src;
6697 register rtx dest = SET_DEST (sets[i].rtl);
6698 enum machine_mode mode
6699 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6700
6701 if (sets[i].src_elt == 0)
6702 {
6703 register struct table_elt *elt;
6704
6705 /* Note that these insert_regs calls cannot remove
6706 any of the src_elt's, because they would have failed to
6707 match if not still valid. */
6708 if (insert_regs (src, classp, 0))
6709 sets[i].src_hash_code = HASH (src, mode);
6710 elt = insert (src, classp, sets[i].src_hash_code, mode);
6711 elt->in_memory = sets[i].src_in_memory;
6712 elt->in_struct = sets[i].src_in_struct;
6713 sets[i].src_elt = classp = elt;
6714 }
6715
6716 if (sets[i].src_const && sets[i].src_const_elt == 0
6717 && src != sets[i].src_const
6718 && ! rtx_equal_p (sets[i].src_const, src))
6719 sets[i].src_elt = insert (sets[i].src_const, classp,
6720 sets[i].src_const_hash_code, mode);
6721 }
6722 }
6723 else if (sets[i].src_elt == 0)
6724 /* If we did not insert the source into the hash table (e.g., it was
6725 volatile), note the equivalence class for the REG_EQUAL value, if any,
6726 so that the destination goes into that class. */
6727 sets[i].src_elt = src_eqv_elt;
6728
6729 invalidate_from_clobbers (&writes_memory, x);
6730
6731 /* Some registers are invalidated by subroutine calls. Memory is
6732 invalidated by non-constant calls. */
6733
6734 if (GET_CODE (insn) == CALL_INSN)
6735 {
6736 static struct write_data everything = {0, 1, 1, 1};
6737
6738 if (! CONST_CALL_P (insn))
6739 invalidate_memory (&everything);
6740 invalidate_for_call ();
6741 }
6742
6743 /* Now invalidate everything set by this instruction.
6744 If a SUBREG or other funny destination is being set,
6745 sets[i].rtl is still nonzero, so here we invalidate the reg
6746 a part of which is being set. */
6747
6748 for (i = 0; i < n_sets; i++)
6749 if (sets[i].rtl)
6750 {
6751 register rtx dest = sets[i].inner_dest;
6752
6753 /* Needed for registers to remove the register from its
6754 previous quantity's chain.
6755 Needed for memory if this is a nonvarying address, unless
6756 we have just done an invalidate_memory that covers even those. */
6757 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
6758 || (! writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
6759 invalidate (dest);
6760 }
6761
6762 /* Make sure registers mentioned in destinations
6763 are safe for use in an expression to be inserted.
6764 This removes from the hash table
6765 any invalid entry that refers to one of these registers.
6766
6767 We don't care about the return value from mention_regs because
6768 we are going to hash the SET_DEST values unconditionally. */
6769
6770 for (i = 0; i < n_sets; i++)
6771 if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
6772 mention_regs (SET_DEST (sets[i].rtl));
6773
6774 /* We may have just removed some of the src_elt's from the hash table.
6775 So replace each one with the current head of the same class. */
6776
6777 for (i = 0; i < n_sets; i++)
6778 if (sets[i].rtl)
6779 {
6780 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6781 /* If elt was removed, find current head of same class,
6782 or 0 if nothing remains of that class. */
6783 {
6784 register struct table_elt *elt = sets[i].src_elt;
6785
6786 while (elt && elt->prev_same_value)
6787 elt = elt->prev_same_value;
6788
6789 while (elt && elt->first_same_value == 0)
6790 elt = elt->next_same_value;
6791 sets[i].src_elt = elt ? elt->first_same_value : 0;
6792 }
6793 }
6794
6795 /* Now insert the destinations into their equivalence classes. */
6796
6797 for (i = 0; i < n_sets; i++)
6798 if (sets[i].rtl)
6799 {
6800 register rtx dest = SET_DEST (sets[i].rtl);
6801 register struct table_elt *elt;
6802
6803 /* Don't record value if we are not supposed to risk allocating
6804 floating-point values in registers that might be wider than
6805 memory. */
6806 if ((flag_float_store
6807 && GET_CODE (dest) == MEM
6808 && GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
6809 /* Don't record values of destinations set inside a libcall block
6810 since we might delete the libcall. Things should have been set
6811 up so we won't want to reuse such a value, but we play it safe
6812 here. */
6813 || in_libcall_block
6814 /* If we didn't put a REG_EQUAL value or a source into the hash
6815 table, there is no point in recording DEST. */
6816 || sets[i].src_elt == 0)
6817 continue;
6818
6819 /* STRICT_LOW_PART isn't part of the value BEING set,
6820 and neither is the SUBREG inside it.
6821 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6822 if (GET_CODE (dest) == STRICT_LOW_PART)
6823 dest = SUBREG_REG (XEXP (dest, 0));
6824
6825 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6826 /* Registers must also be inserted into chains for quantities. */
6827 if (insert_regs (dest, sets[i].src_elt, 1))
6828 /* If `insert_regs' changes something, the hash code must be
6829 recalculated. */
6830 sets[i].dest_hash_code = HASH (dest, GET_MODE (dest));
6831
6832 elt = insert (dest, sets[i].src_elt,
6833 sets[i].dest_hash_code, GET_MODE (dest));
6834 elt->in_memory = GET_CODE (sets[i].inner_dest) == MEM;
6835 if (elt->in_memory)
6836 {
6837 /* This implicitly assumes a whole struct
6838 need not have MEM_IN_STRUCT_P.
6839 But a whole struct is *supposed* to have MEM_IN_STRUCT_P. */
6840 elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
6841 || sets[i].inner_dest != SET_DEST (sets[i].rtl));
6842 }
6843
6844 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6845 narrower than M2, and both M1 and M2 are the same number of words,
6846 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6847 make that equivalence as well.
6848
6849 However, BAR may have equivalences for which gen_lowpart_if_possible
6850 will produce a simpler value than gen_lowpart_if_possible applied to
6851 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6852 BAR's equivalences. If we don't get a simplified form, make
6853 the SUBREG. It will not be used in an equivalence, but will
6854 cause two similar assignments to be detected.
6855
6856 Note the loop below will find SUBREG_REG (DEST) since we have
6857 already entered SRC and DEST of the SET in the table. */
6858
6859 if (GET_CODE (dest) == SUBREG
6860 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) / UNITS_PER_WORD
6861 == GET_MODE_SIZE (GET_MODE (dest)) / UNITS_PER_WORD)
6862 && (GET_MODE_SIZE (GET_MODE (dest))
6863 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6864 && sets[i].src_elt != 0)
6865 {
6866 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6867 struct table_elt *elt, *classp = 0;
6868
6869 for (elt = sets[i].src_elt->first_same_value; elt;
6870 elt = elt->next_same_value)
6871 {
6872 rtx new_src = 0;
6873 int src_hash;
6874 struct table_elt *src_elt;
6875
6876 /* Ignore invalid entries. */
6877 if (GET_CODE (elt->exp) != REG
6878 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6879 continue;
6880
6881 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6882 if (new_src == 0)
6883 new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
6884
6885 src_hash = HASH (new_src, new_mode);
6886 src_elt = lookup (new_src, src_hash, new_mode);
6887
6888 /* Put the new source in the hash table if it isn't
6889 there already. */
6890 if (src_elt == 0)
6891 {
6892 if (insert_regs (new_src, classp, 0))
6893 src_hash = HASH (new_src, new_mode);
6894 src_elt = insert (new_src, classp, src_hash, new_mode);
6895 src_elt->in_memory = elt->in_memory;
6896 src_elt->in_struct = elt->in_struct;
6897 }
6898 else if (classp && classp != src_elt->first_same_value)
6899 /* Show that two things that we've seen before are
6900 actually the same. */
6901 merge_equiv_classes (src_elt, classp);
6902
6903 classp = src_elt->first_same_value;
6904 }
6905 }
6906 }
6907
6908 /* Special handling for (set REG0 REG1)
6909 where REG0 is the "cheapest", cheaper than REG1.
6910 After cse, REG1 will probably not be used in the sequel,
6911 so (if easily done) change this insn to (set REG1 REG0) and
6912 replace REG1 with REG0 in the previous insn that computed their value.
6913 Then REG1 will become a dead store and won't cloud the situation
6914 for later optimizations.
6915
6916 Do not make this change if REG1 is a hard register, because it will
6917 then be used in the sequel and we may be changing a two-operand insn
6918 into a three-operand insn.
6919
6920 Also do not do this if we are operating on a copy of INSN. */
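/* A sketch of the transformation (insns and register numbers are
   illustrative):
   (set (reg 70) (plus (reg 68) (reg 69)))
   (set (reg 66) (reg 70))
   becomes
   (set (reg 66) (plus (reg 68) (reg 69)))
   (set (reg 70) (reg 66))
   after which the copy into reg 70 is a dead store if reg 70 is not
   used later. */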
6921
6922 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6923 && NEXT_INSN (PREV_INSN (insn)) == insn
6924 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6925 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6926 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
6927 && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
6928 == REGNO (SET_DEST (sets[0].rtl))))
6929 {
6930 rtx prev = PREV_INSN (insn);
6931 while (prev && GET_CODE (prev) == NOTE)
6932 prev = PREV_INSN (prev);
6933
6934 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
6935 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
6936 {
6937 rtx dest = SET_DEST (sets[0].rtl);
6938 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
6939
6940 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
6941 validate_change (insn, & SET_DEST (sets[0].rtl),
6942 SET_SRC (sets[0].rtl), 1);
6943 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
6944 apply_change_group ();
6945
6946 /* If REG1 was equivalent to a constant, REG0 is not. */
6947 if (note)
6948 PUT_REG_NOTE_KIND (note, REG_EQUAL);
6949
6950 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6951 any REG_WAS_0 note on INSN to PREV. */
6952 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6953 if (note)
6954 remove_note (prev, note);
6955
6956 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6957 if (note)
6958 {
6959 remove_note (insn, note);
6960 XEXP (note, 1) = REG_NOTES (prev);
6961 REG_NOTES (prev) = note;
6962 }
6963 }
6964 }
6965
6966 /* If this is a conditional jump insn, record any known equivalences due to
6967 the condition being tested. */
6968
6969 last_jump_equiv_class = 0;
6970 if (GET_CODE (insn) == JUMP_INSN
6971 && n_sets == 1 && GET_CODE (x) == SET
6972 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6973 record_jump_equiv (insn, 0);
6974
6975 #ifdef HAVE_cc0
6976 /* If the previous insn set CC0 and this insn no longer references CC0,
6977 delete the previous insn. Here we use the fact that nothing expects CC0
6978 to be valid over an insn, which is true until the final pass. */
6979 if (prev_insn && GET_CODE (prev_insn) == INSN
6980 && (tem = single_set (prev_insn)) != 0
6981 && SET_DEST (tem) == cc0_rtx
6982 && ! reg_mentioned_p (cc0_rtx, x))
6983 {
6984 PUT_CODE (prev_insn, NOTE);
6985 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
6986 NOTE_SOURCE_FILE (prev_insn) = 0;
6987 }
6988
6989 prev_insn_cc0 = this_insn_cc0;
6990 prev_insn_cc0_mode = this_insn_cc0_mode;
6991 #endif
6992
6993 prev_insn = insn;
6994 }
6995 \f
6996 /* Store 1 in *WRITES_PTR for those categories of memory ref
6997 that must be invalidated when the expression WRITTEN is stored in.
6998 If WRITTEN is null, say everything must be invalidated. */
6999
7000 static void
7001 note_mem_written (written, writes_ptr)
7002 rtx written;
7003 struct write_data *writes_ptr;
7004 {
7005 static struct write_data everything = {0, 1, 1, 1};
7006
7007 if (written == 0)
7008 *writes_ptr = everything;
7009 else if (GET_CODE (written) == MEM)
7010 {
7011 /* Pushing or popping the stack invalidates just the stack pointer. */
7012 rtx addr = XEXP (written, 0);
7013 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7014 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7015 && GET_CODE (XEXP (addr, 0)) == REG
7016 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7017 {
7018 writes_ptr->sp = 1;
7019 return;
7020 }
7021 else if (GET_MODE (written) == BLKmode)
7022 *writes_ptr = everything;
7023 else if (cse_rtx_addr_varies_p (written))
7024 {
7025 /* A varying address that is a sum indicates an array element,
7026 and that's just as good as a structure element
7027 in implying that we need not invalidate scalar variables.
7028 However, we must allow QImode aliasing of scalars, because the
7029 ANSI C standard allows character pointers to alias anything. */
7030 if (! ((MEM_IN_STRUCT_P (written)
7031 || GET_CODE (XEXP (written, 0)) == PLUS)
7032 && GET_MODE (written) != QImode))
7033 writes_ptr->all = 1;
7034 writes_ptr->nonscalar = 1;
7035 }
7036 writes_ptr->var = 1;
7037 }
7038 }
7039
7040 /* Perform invalidation on the basis of everything about an insn
7041 except for invalidating the actual places that are SET in it.
7042 This includes the places CLOBBERed, and anything that might
7043 alias with something that is SET or CLOBBERed.
7044
7045 W points to the writes_memory for this insn, a struct write_data
7046 saying which kinds of memory references must be invalidated.
7047 X is the pattern of the insn. */
7048
7049 static void
7050 invalidate_from_clobbers (w, x)
7051 struct write_data *w;
7052 rtx x;
7053 {
7054 /* If W->var is not set, W specifies no action.
7055 If W->all is set, this step gets all memory refs
7056 so they can be ignored in the rest of this function. */
7057 if (w->var)
7058 invalidate_memory (w);
7059
7060 if (w->sp)
7061 {
7062 if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7063 reg_tick[STACK_POINTER_REGNUM]++;
7064
7065 /* This should be *very* rare. */
7066 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7067 invalidate (stack_pointer_rtx);
7068 }
7069
7070 if (GET_CODE (x) == CLOBBER)
7071 {
7072 rtx ref = XEXP (x, 0);
7073 if (ref
7074 && (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7075 || (GET_CODE (ref) == MEM && ! w->all)))
7076 invalidate (ref);
7077 }
7078 else if (GET_CODE (x) == PARALLEL)
7079 {
7080 register int i;
7081 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7082 {
7083 register rtx y = XVECEXP (x, 0, i);
7084 if (GET_CODE (y) == CLOBBER)
7085 {
7086 rtx ref = XEXP (y, 0);
7087 if (ref
7088 && (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7089 || (GET_CODE (ref) == MEM && ! w->all)))
7090 invalidate (ref);
7091 }
7092 }
7093 }
7094 }
7095 \f
7096 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
7097 and replace any registers in them with either an equivalent constant
7098 or the canonical form of the register. If we are inside an address,
7099 only do this if the address remains valid.
7100
7101 OBJECT is 0 except when within a MEM in which case it is the MEM.
7102
7103 Return the replacement for X. */
7104
7105 static rtx
7106 cse_process_notes (x, object)
7107 rtx x;
7108 rtx object;
7109 {
7110 enum rtx_code code = GET_CODE (x);
7111 char *fmt = GET_RTX_FORMAT (code);
7112 int qty;
7113 int i;
7114
7115 switch (code)
7116 {
7117 case CONST_INT:
7118 case CONST:
7119 case SYMBOL_REF:
7120 case LABEL_REF:
7121 case CONST_DOUBLE:
7122 case PC:
7123 case CC0:
7124 case LO_SUM:
7125 return x;
7126
7127 case MEM:
7128 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7129 return x;
7130
7131 case EXPR_LIST:
7132 case INSN_LIST:
7133 if (REG_NOTE_KIND (x) == REG_EQUAL)
7134 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7135 if (XEXP (x, 1))
7136 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7137 return x;
7138
7139 case SIGN_EXTEND:
7140 case ZERO_EXTEND:
7141 {
7142 rtx new = cse_process_notes (XEXP (x, 0), object);
7143 /* We don't substitute VOIDmode constants into these rtx,
7144 since they would impede folding. */
7145 if (GET_MODE (new) != VOIDmode)
7146 validate_change (object, &XEXP (x, 0), new, 0);
7147 return x;
7148 }
7149
7150 case REG:
7151 i = reg_qty[REGNO (x)];
7152
7153 /* Return a constant or a constant register. */
7154 if (REGNO_QTY_VALID_P (REGNO (x))
7155 && qty_const[i] != 0
7156 && (CONSTANT_P (qty_const[i])
7157 || GET_CODE (qty_const[i]) == REG))
7158 {
7159 rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7160 if (new)
7161 return new;
7162 }
7163
7164 /* Otherwise, canonicalize this register. */
7165 return canon_reg (x, NULL_RTX);
7166 }
7167
7168 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7169 if (fmt[i] == 'e')
7170 validate_change (object, &XEXP (x, i),
7171 cse_process_notes (XEXP (x, i), object), 0);
7172
7173 return x;
7174 }
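
/* A hypothetical example of the above: if (reg:SI 101) currently has a
   known constant value (const_int 4), a note

	(expr_list:REG_EQUAL (plus:SI (reg:SI 100) (reg:SI 101)) ...)

   is rewritten to use (const_int 4) in place of (reg:SI 101), and
   (reg:SI 100) is replaced by its canonical equivalent register.  */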
7175 \f
7176 /* Find common subexpressions between the end test of a loop and the beginning
7177 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
7178
7179 Often we have a loop where an expression in the exit test is used
7180 in the body of the loop. For example "while (*p) *q++ = *p++;".
7181 Because of the way we duplicate the loop exit test in front of the loop,
7182 however, we don't detect that common subexpression. This will be caught
7183 when global cse is implemented, but this is quite a common case.
7184
7185 This function handles the most common cases of these common expressions.
7186 It is called after we have processed the basic block ending with the
7187 NOTE_INSN_LOOP_END note that ends a loop, when the previous JUMP_INSN
7188 jumps to a label used only once. */
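
/* For reference, after jump.c duplicates the exit test the insn stream
   for the example above looks roughly like this (a sketch, not exact
   RTL):

	copy of exit test:  if (*p == 0) goto end;
	NOTE_INSN_LOOP_BEG
   top: body:               *q++ = *p++;
	exit test:          if (*p != 0) goto top;
	NOTE_INSN_LOOP_END
   end:

   The load of *p in the duplicated test and in the body is the common
   subexpression we try to recover here.  */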
7189
7190 static void
7191 cse_around_loop (loop_start)
7192 rtx loop_start;
7193 {
7194 rtx insn;
7195 int i;
7196 struct table_elt *p;
7197
7198 /* If the jump at the end of the loop doesn't go to the start, we don't
7199 do anything. */
7200 for (insn = PREV_INSN (loop_start);
7201 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7202 insn = PREV_INSN (insn))
7203 ;
7204
7205 if (insn == 0
7206 || GET_CODE (insn) != NOTE
7207 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7208 return;
7209
7210 /* If the last insn of the loop (the end test) was an NE comparison,
7211 we will interpret it as an EQ comparison, since we fell through
7212 the loop. Any equivalences resulting from that comparison are
7213 therefore not valid and must be invalidated. */
7214 if (last_jump_equiv_class)
7215 for (p = last_jump_equiv_class->first_same_value; p;
7216 p = p->next_same_value)
7217 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7218 || GET_CODE (p->exp) == SUBREG)
7219 invalidate (p->exp);
7220
7221 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7222 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7223
7224 The only thing we do with SET_DEST is invalidate entries, so we
7225 can safely process each SET in order. It is slightly less efficient
7226 to do so, but we only want to handle the most common cases. */
7227
7228 for (insn = NEXT_INSN (loop_start);
7229 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7230 && ! (GET_CODE (insn) == NOTE
7231 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7232 insn = NEXT_INSN (insn))
7233 {
7234 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7235 && (GET_CODE (PATTERN (insn)) == SET
7236 || GET_CODE (PATTERN (insn)) == CLOBBER))
7237 cse_set_around_loop (PATTERN (insn), insn, loop_start);
7238 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7239 && GET_CODE (PATTERN (insn)) == PARALLEL)
7240 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7241 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7242 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7243 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7244 loop_start);
7245 }
7246 }
7247 \f
7248 /* Variable used for communications between the next two routines. */
7249
7250 static struct write_data skipped_writes_memory;
7251
7252 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
7253 since they are done elsewhere. This function is called via note_stores. */
7254
7255 static void
7256 invalidate_skipped_set (dest, set)
7257 rtx dest;
7258 rtx set;
7259 {
7260 if (GET_CODE (set) == CLOBBER
7261 #ifdef HAVE_cc0
7262 || dest == cc0_rtx
7263 #endif
7264 || dest == pc_rtx)
7265 return;
7266
7267 if (GET_CODE (dest) == MEM)
7268 note_mem_written (dest, &skipped_writes_memory);
7269
7270 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7271 || (! skipped_writes_memory.all && ! cse_rtx_addr_varies_p (dest)))
7272 invalidate (dest);
7273 }
7274
7275 /* Invalidate everything set by insns from START up to the end of the
7276 function or the next label. This is called when we wish to CSE around
7277 a block that is conditionally executed. */
7278
7279 static void
7280 invalidate_skipped_block (start)
7281 rtx start;
7282 {
7283 rtx insn;
7284 int i;
7285 static struct write_data init = {0, 0, 0, 0};
7286 static struct write_data everything = {0, 1, 1, 1};
7287
7288 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7289 insn = NEXT_INSN (insn))
7290 {
7291 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7292 continue;
7293
7294 skipped_writes_memory = init;
7295
7296 if (GET_CODE (insn) == CALL_INSN)
7297 {
7298 invalidate_for_call ();
7299 skipped_writes_memory = everything;
7300 }
7301
7302 note_stores (PATTERN (insn), invalidate_skipped_set);
7303 invalidate_from_clobbers (&skipped_writes_memory, PATTERN (insn));
7304 }
7305 }
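
/* E.g., when a branch skips a conditionally executed block such as
   "if (x) { a = b; *p = 0; }", the stores to `a' and `*p' inside the
   block are noted here so that no stale equivalence for `a' or for
   memory survives past the point where the two paths rejoin.  */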
7306 \f
7307 /* Used for communication between the following two routines; contains a
7308 value to be checked for modification. */
7309
7310 static rtx cse_check_loop_start_value;
7311
7312 /* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
7313 indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0. */
7314
7315 static void
7316 cse_check_loop_start (x, set)
7317 rtx x;
7318 rtx set;
7319 {
7320 if (cse_check_loop_start_value == 0
7321 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
7322 return;
7323
7324 if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
7325 || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
7326 cse_check_loop_start_value = 0;
7327 }
7328
7329 /* X is a SET or CLOBBER contained in INSN that was found near the start of
7330 a loop that starts with the label at LOOP_START.
7331
7332 If X is a SET, we see if its SET_SRC is currently in our hash table.
7333 If so, we see if it has a value equal to some register used only in the
7334 loop exit code (as marked by jump.c).
7335
7336 If those two conditions are true, we search backwards from the start of
7337 the loop to see if that same value was loaded into a register that still
7338 retains its value at the start of the loop.
7339
7340 If so, we insert an insn after the load to copy the destination of that
7341 load into the equivalent register and (try to) replace our SET_SRC with that
7342 register.
7343
7344 In any event, we invalidate whatever this SET or CLOBBER modifies. */
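
/* A sketch with hypothetical register numbers: suppose the loop body
   contains

	(set (reg 60) (mem X))

   where (mem X) is known equivalent to (reg 70), a cheaper register
   used only in the loop exit test, and an insn P before the loop does

	(set (reg 50) (mem X))

   If nothing modifies (mem X) between P and the loop start, we emit
   (set (reg 70) (reg 50)) after P and change the in-loop insn to
   (set (reg 60) (reg 70)).  */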
7345
7346 static void
7347 cse_set_around_loop (x, insn, loop_start)
7348 rtx x;
7349 rtx insn;
7350 rtx loop_start;
7351 {
7352 rtx p;
7353 struct table_elt *src_elt;
7354 static struct write_data init = {0, 0, 0, 0};
7355 struct write_data writes_memory;
7356
7357 writes_memory = init;
7358
7359 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
7360 are setting PC or CC0 or whose SET_SRC is already a register. */
7361 if (GET_CODE (x) == SET
7362 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
7363 && GET_CODE (SET_SRC (x)) != REG)
7364 {
7365 src_elt = lookup (SET_SRC (x),
7366 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
7367 GET_MODE (SET_DEST (x)));
7368
7369 if (src_elt)
7370 for (src_elt = src_elt->first_same_value; src_elt;
7371 src_elt = src_elt->next_same_value)
7372 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
7373 && COST (src_elt->exp) < COST (SET_SRC (x)))
7374 {
7375 rtx p, set;
7376
7377 /* Look for an insn in front of LOOP_START that sets
7378 something in the desired mode to SET_SRC (x) before we hit
7379 a label or CALL_INSN. */
7380
7381 for (p = prev_nonnote_insn (loop_start);
7382 p && GET_CODE (p) != CALL_INSN
7383 && GET_CODE (p) != CODE_LABEL;
7384 p = prev_nonnote_insn (p))
7385 if ((set = single_set (p)) != 0
7386 && GET_CODE (SET_DEST (set)) == REG
7387 && GET_MODE (SET_DEST (set)) == src_elt->mode
7388 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
7389 {
7390 /* We now have to ensure that nothing between P
7391 and LOOP_START modified anything referenced in
7392 SET_SRC (x). We know that nothing within the loop
7393 can modify it, or we would have invalidated it in
7394 the hash table. */
7395 rtx q;
7396
7397 cse_check_loop_start_value = SET_SRC (x);
7398 for (q = p; q != loop_start; q = NEXT_INSN (q))
7399 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
7400 note_stores (PATTERN (q), cse_check_loop_start);
7401
7402 /* If nothing was changed and we can replace our
7403 SET_SRC, add an insn after P to copy its destination
7404 to what we will be replacing SET_SRC with. */
7405 if (cse_check_loop_start_value
7406 && validate_change (insn, &SET_SRC (x),
7407 src_elt->exp, 0))
7408 emit_insn_after (gen_move_insn (src_elt->exp,
7409 SET_DEST (set)),
7410 p);
7411 break;
7412 }
7413 }
7414 }
7415
7416 /* Now invalidate anything modified by X. */
7417 note_mem_written (SET_DEST (x), &writes_memory);
7418
7419 if (writes_memory.var)
7420 invalidate_memory (&writes_memory);
7421
7422 /* See comment on similar code in cse_insn for explanation of these tests. */
7423 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
7424 || (GET_CODE (SET_DEST (x)) == MEM && ! writes_memory.all
7425 && ! cse_rtx_addr_varies_p (SET_DEST (x))))
7426 invalidate (SET_DEST (x));
7427 }
7428 \f
7429 /* Find the end of INSN's basic block and return its range,
7430 the total number of SETs in all the insns of the block, the last insn of the
7431 block, and the branch path.
7432
7433 The branch path indicates which branches should be followed. If a non-zero
7434 path size is specified, the block should be rescanned and a different set
7435 of branches will be taken. The branch path is only used if
7436 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
7437
7438 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
7439 used to describe the block. It is filled in with the information about
7440 the current block. The incoming structure's branch path, if any, is used
7441 to construct the output branch path. */
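
/* For example, with at most two branches on a path and no new branches
   found on rescan, successive calls for the same starting insn explore
   the path states

	{TAKEN, TAKEN} -> {TAKEN, NOT_TAKEN} -> {NOT_TAKEN}

   after which every branch is NOT_TAKEN, so DATA->path_size is set to
   zero and no further rescan is requested.  */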
7442
7443 void
7444 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
7445 rtx insn;
7446 struct cse_basic_block_data *data;
7447 int follow_jumps;
7448 int after_loop;
7449 int skip_blocks;
7450 {
7451 rtx p = insn, q;
7452 int nsets = 0;
7453 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
7454 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
7455 int path_size = data->path_size;
7456 int path_entry = 0;
7457 int i;
7458
7459 /* Update the previous branch path, if any. If the last branch was
7460 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
7461 shorten the path by one and look at the previous branch. We know that
7462 at least one branch must have been taken if PATH_SIZE is non-zero. */
7463 while (path_size > 0)
7464 {
7465 if (data->path[path_size - 1].status != NOT_TAKEN)
7466 {
7467 data->path[path_size - 1].status = NOT_TAKEN;
7468 break;
7469 }
7470 else
7471 path_size--;
7472 }
7473
7474 /* Scan to end of this basic block. */
7475 while (p && GET_CODE (p) != CODE_LABEL)
7476 {
7477 /* Don't cse out the end of a loop. This makes a difference
7478 only for the unusual loops that always execute at least once;
7479 all other loops have labels there so we will stop in any case.
7480 Cse'ing out the end of the loop is dangerous because it
7481 might cause an invariant expression inside the loop
7482 to be reused after the end of the loop. This would make it
7483 hard to move the expression out of the loop in loop.c,
7484 especially if it is one of several equivalent expressions
7485 and loop.c would like to eliminate it.
7486
7487 If we are running after loop.c has finished, we can ignore
7488 the NOTE_INSN_LOOP_END. */
7489
7490 if (! after_loop && GET_CODE (p) == NOTE
7491 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
7492 break;
7493
7494 /* Don't cse over a call to setjmp; on some machines (e.g. the VAX)
7495 the regs restored by the longjmp come from
7496 a later time than the setjmp. */
7497 if (GET_CODE (p) == NOTE
7498 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
7499 break;
7500
7501 /* A PARALLEL can have lots of SETs in it,
7502 especially if it is really an ASM_OPERANDS. */
7503 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
7504 && GET_CODE (PATTERN (p)) == PARALLEL)
7505 nsets += XVECLEN (PATTERN (p), 0);
7506 else if (GET_CODE (p) != NOTE)
7507 nsets += 1;
7508
7509 /* Ignore insns made by CSE; they cannot affect the boundaries of
7510 the basic block. */
7511
7512 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
7513 high_cuid = INSN_CUID (p);
7514 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
7515 low_cuid = INSN_CUID (p);
7516
7517 /* See if this insn is in our branch path. If it is and we are to
7518 take it, do so. */
7519 if (path_entry < path_size && data->path[path_entry].branch == p)
7520 {
7521 if (data->path[path_entry].status != NOT_TAKEN)
7522 p = JUMP_LABEL (p);
7523
7524 /* Point to next entry in path, if any. */
7525 path_entry++;
7526 }
7527
7528 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
7529 was specified, we haven't reached our maximum path length, there are
7530 insns following the target of the jump, this is the only use of the
7531 jump label, and the target label is preceded by a BARRIER.
7532
7533 Alternatively, we can follow the jump if it branches around a
7534 block of code and there are no other branches into the block.
7535 In this case invalidate_skipped_block will be called to invalidate any
7536 registers set in the block when following the jump. */
7537
7538 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7539 && GET_CODE (p) == JUMP_INSN
7540 && GET_CODE (PATTERN (p)) == SET
7541 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
7542 && LABEL_NUSES (JUMP_LABEL (p)) == 1
7543 && NEXT_INSN (JUMP_LABEL (p)) != 0)
7544 {
7545 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
7546 if ((GET_CODE (q) != NOTE
7547 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
7548 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
7549 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
7550 break;
7551
7552 /* If we ran into a BARRIER, this code is an extension of the
7553 basic block when the branch is taken. */
7554 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7555 {
7556 /* Don't allow ourselves to keep walking around an
7557 always-executed loop. */
7558 if (next_real_insn (q) == next)
7559 {
7560 p = NEXT_INSN (p);
7561 continue;
7562 }
7563
7564 /* Similarly, don't put a branch in our path more than once. */
7565 for (i = 0; i < path_entry; i++)
7566 if (data->path[i].branch == p)
7567 break;
7568
7569 if (i != path_entry)
7570 break;
7571
7572 data->path[path_entry].branch = p;
7573 data->path[path_entry++].status = TAKEN;
7574
7575 /* This branch now ends our path. It was possible that we
7576 didn't see this branch the last time around (when the
7577 insn in front of the target was a JUMP_INSN that was
7578 turned into a no-op). */
7579 path_size = path_entry;
7580
7581 p = JUMP_LABEL (p);
7582 /* Mark block so we won't scan it again later. */
7583 PUT_MODE (NEXT_INSN (p), QImode);
7584 }
7585 /* Detect a branch around a block of code. */
7586 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7587 {
7588 register rtx tmp;
7589
7590 if (next_real_insn (q) == next)
7591 {
7592 p = NEXT_INSN (p);
7593 continue;
7594 }
7595
7596 for (i = 0; i < path_entry; i++)
7597 if (data->path[i].branch == p)
7598 break;
7599
7600 if (i != path_entry)
7601 break;
7602
7603 /* This is no_labels_between_p (p, q) with an added check for
7604 reaching the end of a function (in case Q precedes P). */
7605 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
7606 if (GET_CODE (tmp) == CODE_LABEL)
7607 break;
7608
7609 if (tmp == q)
7610 {
7611 data->path[path_entry].branch = p;
7612 data->path[path_entry++].status = AROUND;
7613
7614 path_size = path_entry;
7615
7616 p = JUMP_LABEL (p);
7617 /* Mark block so we won't scan it again later. */
7618 PUT_MODE (NEXT_INSN (p), QImode);
7619 }
7620 }
7621 }
7622 p = NEXT_INSN (p);
7623 }
7624
7625 data->low_cuid = low_cuid;
7626 data->high_cuid = high_cuid;
7627 data->nsets = nsets;
7628 data->last = p;
7629
7630 /* If no jump in the path is taken, set our path length to zero
7631 so a rescan won't be done. */
7632 for (i = path_size - 1; i >= 0; i--)
7633 if (data->path[i].status != NOT_TAKEN)
7634 break;
7635
7636 if (i == -1)
7637 data->path_size = 0;
7638 else
7639 data->path_size = path_size;
7640
7641 /* End the current branch path. */
7642 data->path[path_size].branch = 0;
7643 }
7644 \f
7645 /* Perform cse on the instructions of a function.
7646 F is the first instruction.
7647 NREGS is one plus the highest pseudo-reg number used in the function.
7648
7649 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7650 (only if -frerun-cse-after-loop).
7651
7652 Returns 1 if jump_optimize should be redone due to simplifications
7653 in conditional jump instructions. */
7654
7655 int
7656 cse_main (f, nregs, after_loop, file)
7657 rtx f;
7658 int nregs;
7659 int after_loop;
7660 FILE *file;
7661 {
7662 struct cse_basic_block_data val;
7663 register rtx insn = f;
7664 register int i;
7665
7666 cse_jumps_altered = 0;
7667 constant_pool_entries_cost = 0;
7668 val.path_size = 0;
7669
7670 init_recog ();
7671
7672 max_reg = nregs;
7673
7674 all_minus_one = (int *) alloca (nregs * sizeof (int));
7675 consec_ints = (int *) alloca (nregs * sizeof (int));
7676
7677 for (i = 0; i < nregs; i++)
7678 {
7679 all_minus_one[i] = -1;
7680 consec_ints[i] = i;
7681 }
7682
7683 reg_next_eqv = (int *) alloca (nregs * sizeof (int));
7684 reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
7685 reg_qty = (int *) alloca (nregs * sizeof (int));
7686 reg_in_table = (int *) alloca (nregs * sizeof (int));
7687 reg_tick = (int *) alloca (nregs * sizeof (int));
7688
7689 /* Discard all the free elements of the previous function
7690 since they are allocated in the temporary obstack. */
7691 bzero (table, sizeof table);
7692 free_element_chain = 0;
7693 n_elements_made = 0;
7694
7695 /* Find the largest uid. */
7696
7697 max_uid = get_max_uid ();
7698 uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
7699 bzero (uid_cuid, (max_uid + 1) * sizeof (int));
7700
7701 /* Compute the mapping from uids to cuids.
7702 CUIDs are numbers assigned to insns, like uids,
7703 except that cuids increase monotonically through the code.
7704 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7705 between two insns is not affected by -g. */
7706
7707 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7708 {
7709 if (GET_CODE (insn) != NOTE
7710 || NOTE_LINE_NUMBER (insn) < 0)
7711 INSN_CUID (insn) = ++i;
7712 else
7713 /* Give a line number note the same cuid as preceding insn. */
7714 INSN_CUID (insn) = i;
7715 }
7716
7717 /* Initialize which registers are clobbered by calls. */
7718
7719 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
7720
7721 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7722 if ((call_used_regs[i]
7723 /* Used to check !fixed_regs[i] here, but that isn't safe;
7724 fixed regs are still call-clobbered, and sched can get
7725 confused if they can "live across calls".
7726
7727 The frame pointer is always preserved across calls. The arg
7728 pointer is preserved if it is fixed. The stack pointer usually is, unless
7729 RETURN_POPS_ARGS, in which case an explicit CLOBBER
7730 will be present. If we are generating PIC code, the PIC offset
7731 table register is preserved across calls. */
7732
7733 && i != STACK_POINTER_REGNUM
7734 && i != FRAME_POINTER_REGNUM
7735 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
7736 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
7737 #endif
7738 #ifdef PIC_OFFSET_TABLE_REGNUM
7739 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
7740 #endif
7741 )
7742 || global_regs[i])
7743 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
7744
7745 /* Loop over basic blocks.
7746 Compute the maximum number of qty's needed for each basic block
7747 (which is 2 for each SET). */
7748 insn = f;
7749 while (insn)
7750 {
7751 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7752 flag_cse_skip_blocks);
7753
7754 /* If this basic block was already processed or has no sets, skip it. */
7755 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7756 {
7757 PUT_MODE (insn, VOIDmode);
7758 insn = (val.last ? NEXT_INSN (val.last) : 0);
7759 val.path_size = 0;
7760 continue;
7761 }
7762
7763 cse_basic_block_start = val.low_cuid;
7764 cse_basic_block_end = val.high_cuid;
7765 max_qty = val.nsets * 2;
7766
7767 if (file)
7768 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
7769 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7770 val.nsets);
7771
7772 /* Make MAX_QTY bigger to give us room to optimize
7773 past the end of this basic block, if that should prove useful. */
7774 if (max_qty < 500)
7775 max_qty = 500;
7776
7777 max_qty += max_reg;
7778
7779 /* If this basic block is being extended by following certain jumps,
7780 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7781 Otherwise, we start after this basic block. */
7782 if (val.path_size > 0)
7783 cse_basic_block (insn, val.last, val.path, 0);
7784 else
7785 {
7786 int old_cse_jumps_altered = cse_jumps_altered;
7787 rtx temp;
7788
7789 /* When cse changes a conditional jump to an unconditional
7790 jump, we want to reprocess the block, since it will give
7791 us a new branch path to investigate. */
7792 cse_jumps_altered = 0;
7793 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7794 if (cse_jumps_altered == 0
7795 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7796 insn = temp;
7797
7798 cse_jumps_altered |= old_cse_jumps_altered;
7799 }
7800
7801 #ifdef USE_C_ALLOCA
7802 alloca (0);
7803 #endif
7804 }
7805
7806 /* Tell refers_to_mem_p that qty_const info is not available. */
7807 qty_const = 0;
7808
7809 if (max_elements_made < n_elements_made)
7810 max_elements_made = n_elements_made;
7811
7812 return cse_jumps_altered;
7813 }
7814
7815 /* Process a single basic block. FROM and TO are the limits of the basic
7816 block. NEXT_BRANCH points to the branch path when following jumps or
7817 a null path when not following jumps.
7818
7819 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7820 loop. This is true when we are being called for the last time on a
7821 block and this CSE pass is before loop.c. */
7822
7823 static rtx
7824 cse_basic_block (from, to, next_branch, around_loop)
7825 register rtx from, to;
7826 struct branch_path *next_branch;
7827 int around_loop;
7828 {
7829 register rtx insn;
7830 int to_usage = 0;
7831 int in_libcall_block = 0;
7832
7833 /* Each of these arrays is undefined before max_reg, so only allocate
7834 the space actually needed and adjust the start below. */
7835
7836 qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7837 qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7838 qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
7839 qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7840 qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7841 qty_comparison_code
7842 = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
7843 qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
7844 qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
7845
7846 qty_first_reg -= max_reg;
7847 qty_last_reg -= max_reg;
7848 qty_mode -= max_reg;
7849 qty_const -= max_reg;
7850 qty_const_insn -= max_reg;
7851 qty_comparison_code -= max_reg;
7852 qty_comparison_qty -= max_reg;
7853 qty_comparison_const -= max_reg;
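
/* After the adjustments above, each qty_... array can be indexed
   directly by quantity number; the valid indexes are max_reg <= q <
   max_qty, so, e.g., qty_mode[max_reg] is the first element actually
   allocated.  */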
7854
7855 new_basic_block ();
7856
7857 /* TO might be a label. If so, protect it from being deleted. */
7858 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7859 ++LABEL_NUSES (to);
7860
7861 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7862 {
7863 register enum rtx_code code;
7864
7865 /* See if this is a branch that is part of the path. If so, and it is
7866 to be taken, do so. */
7867 if (next_branch->branch == insn)
7868 {
7869 enum taken status = next_branch++->status;
7870 if (status != NOT_TAKEN)
7871 {
7872 if (status == TAKEN)
7873 record_jump_equiv (insn, 1);
7874 else
7875 invalidate_skipped_block (NEXT_INSN (insn));
7876
7877 /* Set the last insn as the jump insn; it doesn't affect cc0.
7878 Then follow this branch. */
7879 #ifdef HAVE_cc0
7880 prev_insn_cc0 = 0;
7881 #endif
7882 prev_insn = insn;
7883 insn = JUMP_LABEL (insn);
7884 continue;
7885 }
7886 }
7887
7888 code = GET_CODE (insn);
7889 if (GET_MODE (insn) == QImode)
7890 PUT_MODE (insn, VOIDmode);
7891
7892 if (GET_RTX_CLASS (code) == 'i')
7893 {
7894 /* Process notes first so we have all notes in canonical forms when
7895 looking for duplicate operations. */
7896
7897 if (REG_NOTES (insn))
7898 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7899
7900 /* Track when we are inside a LIBCALL block. Inside such a block,
7901 we do not want to record destinations. The last insn of a
7902 LIBCALL block is not considered to be part of the block, since
7903 its destination is the result of the block and hence should be
7904 recorded. */
7905
7906 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7907 in_libcall_block = 1;
7908 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7909 in_libcall_block = 0;
7910
7911 cse_insn (insn, in_libcall_block);
7912 }
7913
7914 /* If INSN is now an unconditional jump, skip to the end of our
7915 basic block by pretending that we just did the last insn in the
7916 basic block. If we are jumping to the end of our block, show
7917 that we can have one usage of TO. */
7918
7919 if (simplejump_p (insn))
7920 {
7921 if (to == 0)
7922 return 0;
7923
7924 if (JUMP_LABEL (insn) == to)
7925 to_usage = 1;
7926
7927 /* Maybe TO was deleted because the jump is unconditional.
7928 If so, there is nothing left in this basic block. */
7929 /* ??? Perhaps it would be smarter to set TO
7930 to whatever follows this insn,
7931 and pretend the basic block had always ended here. */
7932 if (INSN_DELETED_P (to))
7933 break;
7934
7935 insn = PREV_INSN (to);
7936 }
7937
7938 /* See if it is ok to keep on going past the label
7939 which used to end our basic block. Remember that we incremented
7940 the count of that label, so we decrement it here. If we made
7941 a jump unconditional, TO_USAGE will be one; in that case, we don't
7942 want to count the use in that jump. */
7943
7944 if (to != 0 && NEXT_INSN (insn) == to
7945 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7946 {
7947 struct cse_basic_block_data val;
7948
7949 insn = NEXT_INSN (to);
7950
7951 if (LABEL_NUSES (to) == 0)
7952 delete_insn (to);
7953
7954 /* Find the end of the following block. Note that we won't be
7955 following branches in this case. If TO was the last insn
7956 in the function, we are done. Similarly, if we deleted the
7957 insn after TO, it must have been because it was preceded by
7958 a BARRIER. In that case, we are done with this block because it
7959 has no continuation. */
7960
7961 if (insn == 0 || INSN_DELETED_P (insn))
7962 return 0;
7963
7964 to_usage = 0;
7965 val.path_size = 0;
7966 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7967
7968 /* If the tables we allocated have enough space left
7969 to handle all the SETs in the next basic block,
7970 continue through it. Otherwise, return,
7971 and that block will be scanned individually. */
7972 if (val.nsets * 2 + next_qty > max_qty)
7973 break;
7974
7975 cse_basic_block_start = val.low_cuid;
7976 cse_basic_block_end = val.high_cuid;
7977 to = val.last;
7978
7979 /* Prevent TO from being deleted if it is a label. */
7980 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7981 ++LABEL_NUSES (to);
7982
7983 /* Back up so we process the first insn in the extension. */
7984 insn = PREV_INSN (insn);
7985 }
7986 }
7987
7988 if (next_qty > max_qty)
7989 abort ();
7990
7991 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7992 the previous insn is the only insn that branches to the head of a loop,
7993 we can cse into the loop. Don't do this if we changed the jump
7994 structure of a loop unless we aren't going to be following jumps. */
7995
7996 if ((cse_jumps_altered == 0
7997 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7998 && around_loop && to != 0
7999 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
8000 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
8001 && JUMP_LABEL (PREV_INSN (to)) != 0
8002 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
8003 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
8004
8005 return to ? NEXT_INSN (to) : 0;
8006 }
8007 \f
8008 /* Count the number of times registers are used (not set) in X.
8009 COUNTS is an array in which we accumulate the count, INCR is how much
8010 we count each register usage. */
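
/* For example, for an insn whose pattern is

	(set (reg 5) (plus (reg 5) (reg 6)))

   a call with INCR == 1 adds 1 to counts[5] and counts[6] for the uses
   in the SET_SRC, but adds nothing for the SET_DEST, since a plain set
   of a register is not a use.  */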
8011
8012 static void
8013 count_reg_usage (x, counts, incr)
8014 rtx x;
8015 int *counts;
8016 int incr;
8017 {
8018 enum rtx_code code = GET_CODE (x);
8019 char *fmt;
8020 int i, j;
8021
8022 switch (code)
8023 {
8024 case REG:
8025 counts[REGNO (x)] += incr;
8026 return;
8027
8028 case PC:
8029 case CC0:
8030 case CONST:
8031 case CONST_INT:
8032 case CONST_DOUBLE:
8033 case SYMBOL_REF:
8034 case LABEL_REF:
8035 case CLOBBER:
8036 return;
8037
8038 case SET:
8039 /* Unless we are setting a REG, count everything in SET_DEST. */
8040 if (GET_CODE (SET_DEST (x)) != REG)
8041 count_reg_usage (SET_DEST (x), counts, incr);
8042 count_reg_usage (SET_SRC (x), counts, incr);
8043 return;
8044
8045 case INSN:
8046 case JUMP_INSN:
8047 case CALL_INSN:
8048 count_reg_usage (PATTERN (x), counts, incr);
8049
8050 /* Things used in a REG_EQUAL note aren't dead since loop may try to
8051 use them. */
8052
8053 if (REG_NOTES (x))
8054 count_reg_usage (REG_NOTES (x), counts, incr);
8055 return;
8056
8057 case EXPR_LIST:
8058 case INSN_LIST:
8059 if (REG_NOTE_KIND (x) == REG_EQUAL)
8060 count_reg_usage (XEXP (x, 0), counts, incr);
8061 if (XEXP (x, 1))
8062 count_reg_usage (XEXP (x, 1), counts, incr);
8063 return;
8064 }
8065
8066 fmt = GET_RTX_FORMAT (code);
8067 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8068 {
8069 if (fmt[i] == 'e')
8070 count_reg_usage (XEXP (x, i), counts, incr);
8071 else if (fmt[i] == 'E')
8072 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8073 count_reg_usage (XVECEXP (x, i, j), counts, incr);
8074 }
8075 }
8076 \f
8077 /* Scan all the insns and delete any that are dead; i.e., they set a register
8078 that is never used or they copy a register to itself.
8079
8080 This is used to remove insns made obviously dead by cse. It improves the
8081 heuristics in loop since it won't try to move dead invariants out of loops
8082 or make givs for dead quantities. The remaining passes of the compilation
8083 are also sped up. */
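
/* For example (hypothetical register numbers): if (reg 120) is never
   used, then

	(set (reg 120) (plus (reg 100) (reg 101)))

   is deleted and counts[100] and counts[101] are decremented, which in
   turn may let the insns that compute (reg 100) and (reg 101) be
   deleted later in the same backward scan.  */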
8084
8085 void
8086 delete_dead_from_cse (insns, nreg)
8087 rtx insns;
8088 int nreg;
8089 {
8090 int *counts = (int *) alloca (nreg * sizeof (int));
8091 rtx insn, prev;
8092 rtx tem;
8093 int i;
8094 int in_libcall = 0;
8095
8096 /* First count the number of times each register is used. */
8097 bzero (counts, sizeof (int) * nreg);
8098 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
8099 count_reg_usage (insn, counts, 1);
8100
8101 /* Go from the last insn to the first and delete insns that only set unused
8102 registers or copy a register to itself. As we delete an insn, remove
8103 usage counts for registers it uses. */
8104 for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
8105 {
8106 int live_insn = 0;
8107
8108 prev = prev_real_insn (insn);
8109
8110 /* Don't delete any insns that are part of a libcall block.
8111 Flow or loop might get confused if we did that. Remember
8112 that we are scanning backwards. */
8113 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8114 in_libcall = 1;
8115
8116 if (in_libcall)
8117 live_insn = 1;
8118 else if (GET_CODE (PATTERN (insn)) == SET)
8119 {
8120 if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
8121 && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
8122 ;
8123
8124 #ifdef HAVE_cc0
8125 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
8126 && ! side_effects_p (SET_SRC (PATTERN (insn)))
8127 && ((tem = next_nonnote_insn (insn)) == 0
8128 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8129 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8130 ;
8131 #endif
8132 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
8133 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
8134 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
8135 || side_effects_p (SET_SRC (PATTERN (insn))))
8136 live_insn = 1;
8137 }
8138 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
8139 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8140 {
8141 rtx elt = XVECEXP (PATTERN (insn), 0, i);
8142
8143 if (GET_CODE (elt) == SET)
8144 {
8145 if (GET_CODE (SET_DEST (elt)) == REG
8146 && SET_DEST (elt) == SET_SRC (elt))
8147 ;
8148
8149 #ifdef HAVE_cc0
8150 else if (GET_CODE (SET_DEST (elt)) == CC0
8151 && ! side_effects_p (SET_SRC (elt))
8152 && ((tem = next_nonnote_insn (insn)) == 0
8153 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
8154 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
8155 ;
8156 #endif
8157 else if (GET_CODE (SET_DEST (elt)) != REG
8158 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
8159 || counts[REGNO (SET_DEST (elt))] != 0
8160 || side_effects_p (SET_SRC (elt)))
8161 live_insn = 1;
8162 }
8163 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
8164 live_insn = 1;
8165 }
8166 else
8167 live_insn = 1;
8168
8169 /* If this is a dead insn, delete it and show registers in it aren't
8170 being used. */
8171
8172 if (! live_insn)
8173 {
8174 count_reg_usage (insn, counts, -1);
8175 delete_insn (insn);
8176 }
8177
8178 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
8179 in_libcall = 0;
8180 }
8181 }