Commit | Line | Data |
---|---|---|
230d793d | 1 | /* Optimize by combining instructions for GNU compiler. |
3c71940f JL |
2 | Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, |
3 | 1999, 2000 Free Software Foundation, Inc. | |
230d793d RS |
4 | |
5 | This file is part of GNU CC. | |
6 | ||
7 | GNU CC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GNU CC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GNU CC; see the file COPYING. If not, write to | |
940d9d63 RK |
19 | the Free Software Foundation, 59 Temple Place - Suite 330, |
20 | Boston, MA 02111-1307, USA. */ | |
230d793d | 21 | |
230d793d RS |
22 | /* This module is essentially the "combiner" phase of the U. of Arizona |
23 | Portable Optimizer, but redone to work on our list-structured | |
24 | representation for RTL instead of their string representation. | |
25 | ||
26 | The LOG_LINKS of each insn identify the most recent assignment | |
27 | to each REG used in the insn. It is a list of previous insns, | |
28 | each of which contains a SET for a REG that is used in this insn | |
29 | and not used or set in between. LOG_LINKs never cross basic blocks. | |
30 | They were set up by the preceding pass (lifetime analysis). | |
31 | ||
32 | We try to combine each pair of insns joined by a logical link. | |
33 | We also try to combine triples of insns A, B and C when | |
34 | C has a link back to B and B has a link back to A. | |
35 | ||
36 | LOG_LINKS does not have links for uses of the CC0. It doesn't |
37 | need to, because the insn that sets the CC0 is always immediately | |
38 | before the insn that tests it. So we always regard a branch | |
39 | insn as having a logical link to the preceding insn. The same is true | |
40 | for an insn explicitly using CC0. | |
41 | ||
42 | We check (with use_crosses_set_p) to avoid combining in such a way | |
43 | as to move a computation to a place where its value would be different. | |
44 | ||
45 | Combination is done by mathematically substituting the previous | |
46 | insn(s) values for the regs they set into the expressions in | |
47 | the later insns that refer to these regs. If the result is a valid insn | |
48 | for our target machine, according to the machine description, | |
49 | we install it, delete the earlier insns, and update the data flow | |
50 | information (LOG_LINKS and REG_NOTES) for what we did. | |
51 | ||
52 | There are a few exceptions where the dataflow information created by | |
53 | flow.c isn't completely updated: |
54 | ||
55 | - reg_live_length is not updated | |
56 | - reg_n_refs is not adjusted in the rare case when a register is | |
57 | no longer required in a computation | |
58 | - there are extremely rare cases (see distribute_regnotes) when a | |
59 | REG_DEAD note is lost | |
60 | - a LOG_LINKS entry that refers to an insn with multiple SETs may be | |
663522cb | 61 | removed because there is no way to know which register it was |
230d793d RS |
62 | linking.
63 | ||
64 | To simplify substitution, we combine only when the earlier insn(s) | |
65 | consist of only a single assignment. To simplify updating afterward, | |
66 | we never combine when a subroutine call appears in the middle. | |
67 | ||
68 | Since we do not represent assignments to CC0 explicitly except when that | |
69 | is all an insn does, there is no LOG_LINKS entry in an insn that uses | |
70 | the condition code for the insn that set the condition code. | |
71 | Fortunately, these two insns must be consecutive. | |
72 | Therefore, every JUMP_INSN is taken to have an implicit logical link | |
73 | to the preceding insn. This is not quite right, since non-jumps can | |
74 | also use the condition code; but in practice such insns would not | |
75 | combine anyway. */ | |
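An illustration of the substitution step described above (example ours, not from the original sources): given two linked insns

     I2:  (set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
     I3:  (set (mem:SI (reg:SI 100)) (const_int 0))

combine substitutes I2's SET_SRC for reg 100 in I3, giving

     I3:  (set (mem:SI (plus:SI (reg:SI 99) (const_int 4))) (const_int 0))

If the machine description recognizes the result (say, as a register+offset store), I3 is replaced, I2 is deleted, and LOG_LINKS/REG_NOTES are updated; otherwise every change is undone.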
76 | ||
230d793d | 77 | #include "config.h" |
670ee920 | 78 | #include "system.h" |
c5c76735 | 79 | #include "rtl.h" |
a091679a | 80 | #include "tm_p.h" |
230d793d RS |
81 | #include "flags.h" |
82 | #include "regs.h" | |
55310dad | 83 | #include "hard-reg-set.h" |
230d793d RS |
84 | #include "basic-block.h" |
85 | #include "insn-config.h" | |
49ad7cfa | 86 | #include "function.h" |
d6f4ec51 KG |
87 | /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */ |
88 | #include "expr.h" | |
230d793d RS |
89 | #include "insn-flags.h" |
90 | #include "insn-codes.h" | |
91 | #include "insn-attr.h" | |
92 | #include "recog.h" | |
93 | #include "real.h" | |
2e107e9e | 94 | #include "toplev.h" |
f73ad30e JH |
95 | #include "defaults.h" |
96 | ||
97 | #ifndef ACCUMULATE_OUTGOING_ARGS | |
98 | #define ACCUMULATE_OUTGOING_ARGS 0 | |
99 | #endif | |
100 | ||
101 | /* Supply a default definition for PUSH_ARGS. */ | |
102 | #ifndef PUSH_ARGS | |
103 | #ifdef PUSH_ROUNDING | |
104 | #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS | |
105 | #else | |
106 | #define PUSH_ARGS 0 | |
107 | #endif | |
108 | #endif | |
230d793d RS |
109 | |
110 | /* It is not safe to use ordinary gen_lowpart in combine. | |
111 | Use gen_lowpart_for_combine instead. See comments there. */ | |
112 | #define gen_lowpart dont_use_gen_lowpart_you_dummy | |
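(Editorial note: no function named dont_use_gen_lowpart_you_dummy exists anywhere, so any stray call to gen_lowpart left in this file becomes a reference to an undefined symbol and is caught at link time instead of silently bypassing gen_lowpart_for_combine.)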
113 | ||
114 | /* Number of attempts to combine instructions in this function. */ | |
115 | ||
116 | static int combine_attempts; | |
117 | ||
118 | /* Number of attempts that got as far as substitution in this function. */ | |
119 | ||
120 | static int combine_merges; | |
121 | ||
122 | /* Number of instructions combined with added SETs in this function. */ | |
123 | ||
124 | static int combine_extras; | |
125 | ||
126 | /* Number of instructions combined in this function. */ | |
127 | ||
128 | static int combine_successes; | |
129 | ||
130 | /* Totals over entire compilation. */ | |
131 | ||
132 | static int total_attempts, total_merges, total_extras, total_successes; | |
9210df58 | 133 | |
ddd5a7c1 | 134 | /* Define a default value for REVERSIBLE_CC_MODE. |
9210df58 RK |
135 | We can never assume that a condition code mode is safe to reverse unless |
136 | the md tells us so. */ | |
137 | #ifndef REVERSIBLE_CC_MODE | |
138 | #define REVERSIBLE_CC_MODE(MODE) 0 | |
139 | #endif | |
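A hypothetical illustration (ours, not from any real target's md) of how a target could override this default: a machine whose plain integer condition codes are safely reversible, but whose floating-point compare mode is not (because of unordered operands), might define

     #define REVERSIBLE_CC_MODE(MODE) ((MODE) == CCmode)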
230d793d RS |
140 | \f |
141 | /* Vector mapping INSN_UIDs to cuids. | |
5089e22e | 142 | The cuids are like uids but always increase monotonically. |
230d793d RS |
143 | Combine always uses cuids so that it can compare them. |
144 | But actually renumbering the uids, which we used to do, | |
145 | proves to be a bad idea because it makes it hard to compare | |
146 | the dumps produced by earlier passes with those from later passes. */ | |
147 | ||
148 | static int *uid_cuid; | |
4255220d | 149 | static int max_uid_cuid; |
230d793d RS |
150 | |
151 | /* Get the cuid of an insn. */ | |
152 | ||
1427d6d2 RK |
153 | #define INSN_CUID(INSN) \ |
154 | (INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)]) | |
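(Note, ours: insns created by combine itself can carry UIDs greater than max_uid_cuid, for which uid_cuid has no slot; the macro then falls back to the function insn_cuid, declared below, which finds a nearby insn whose uid is in the table.)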
230d793d RS |
155 | |
156 | /* Maximum register number, which is the size of the tables below. */ | |
157 | ||
770ae6cc | 158 | static unsigned int combine_max_regno; |
230d793d RS |
159 | |
160 | /* Record last point of death of (hard or pseudo) register n. */ | |
161 | ||
162 | static rtx *reg_last_death; | |
163 | ||
164 | /* Record last point of modification of (hard or pseudo) register n. */ | |
165 | ||
166 | static rtx *reg_last_set; | |
167 | ||
168 | /* Record the cuid of the last insn that invalidated memory | |
169 | (anything that writes memory, and subroutine calls, but not pushes). */ | |
170 | ||
171 | static int mem_last_set; | |
172 | ||
173 | /* Record the cuid of the last CALL_INSN | |
174 | so we can tell whether a potential combination crosses any calls. */ | |
175 | ||
176 | static int last_call_cuid; | |
177 | ||
178 | /* When `subst' is called, this is the insn that is being modified | |
179 | (by combining in a previous insn). The PATTERN of this insn | |
180 | is still the old pattern partially modified and it should not be | |
181 | looked at, but this may be used to examine the successors of the insn | |
182 | to judge whether a simplification is valid. */ | |
183 | ||
184 | static rtx subst_insn; | |
185 | ||
0d9641d1 JW |
186 | /* This is an insn that belongs before subst_insn, but is not currently |
187 | on the insn chain. */ | |
188 | ||
189 | static rtx subst_prev_insn; | |
190 | ||
230d793d RS |
191 | /* This is the lowest CUID that `subst' is currently dealing with. |
192 | get_last_value will not return a value if the register was set at or | |
193 | after this CUID. If not for this mechanism, we could get confused if | |
194 | I2 or I1 in try_combine were an insn that used the old value of a register | |
195 | to obtain a new value. In that case, we might erroneously get the | |
196 | new value of the register when we wanted the old one. */ | |
197 | ||
198 | static int subst_low_cuid; | |
199 | ||
6e25d159 RK |
200 | /* This contains any hard registers that are used in newpat; reg_dead_at_p |
201 | must consider all these registers to be always live. */ | |
202 | ||
203 | static HARD_REG_SET newpat_used_regs; | |
204 | ||
abe6e52f RK |
205 | /* This is an insn to which a LOG_LINKS entry has been added. If this |
206 | insn is earlier than I2 or I3, combine should rescan starting at |
207 | that location. */ | |
208 | ||
209 | static rtx added_links_insn; | |
210 | ||
0d4d42c3 RK |
211 | /* Basic block number of the block in which we are performing combines. */ |
212 | static int this_basic_block; | |
715e7fbc | 213 | |
663522cb KH |
214 | /* A bitmap indicating which blocks had registers go dead at entry. |
215 | After combine, we'll need to re-do global life analysis with | |
715e7fbc RH |
216 | those blocks as starting points. */ |
217 | static sbitmap refresh_blocks; | |
218 | static int need_refresh; | |
230d793d RS |
219 | \f |
220 | /* The next group of arrays allows the recording of the last value assigned | |
221 | to (hard or pseudo) register n. We use this information to see if an |
5089e22e | 222 | operation being processed is redundant given a prior operation performed |
230d793d RS |
223 | on the register. For example, an `and' with a constant is redundant if |
224 | all the zero bits are already known to be turned off. | |
225 | ||
226 | We use an approach similar to that used by cse, but change it in the | |
227 | following ways: | |
228 | ||
229 | (1) We do not want to reinitialize at each label. | |
230 | (2) It is useful, but not critical, to know the actual value assigned | |
231 | to a register. Often just its form is helpful. | |
232 | ||
233 | Therefore, we maintain the following arrays: | |
234 | ||
235 | reg_last_set_value the last value assigned | |
236 | reg_last_set_label records the value of label_tick when the | |
237 | register was assigned | |
238 | reg_last_set_table_tick records the value of label_tick when a | |
239 | value using the register is assigned | |
240 | reg_last_set_invalid set to non-zero when it is not valid | |
241 | to use the value of this register in some | |
242 | register's value | |
243 | ||
244 | To understand the usage of these tables, it is important to understand | |
245 | the distinction between the value in reg_last_set_value being valid | |
246 | and the register being validly contained in some other expression in the | |
247 | table. | |
248 | ||
249 | Entry I in reg_last_set_value is valid if it is non-zero, and either | |
250 | reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick. | |
251 | ||
252 | Register I may validly appear in any expression returned for the value | |
253 | of another register if reg_n_sets[i] is 1. It may also appear in the | |
254 | value for register J if reg_last_set_label[i] < reg_last_set_label[j] or | |
255 | reg_last_set_invalid[j] is zero. | |
256 | ||
257 | If an expression is found in the table containing a register which may | |
258 | not validly appear in an expression, the register is replaced by | |
259 | something that won't match, (clobber (const_int 0)). | |
260 | ||
261 | reg_last_set_invalid[i] is set non-zero when register I is being assigned | |
262 | to and reg_last_set_table_tick[i] == label_tick. */ | |
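A concrete instance of the `and' redundancy mentioned above (example ours): if the value recorded for reg 100 is (and:SI (reg:SI 99) (const_int 255)), then a later

     (and:SI (reg:SI 100) (const_int 255))

changes no bits, since everything above bit 7 is already zero, and can be simplified to plain (reg:SI 100) as long as entry 100 is still valid under the rules above.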
263 | ||
0f41302f | 264 | /* Record last value assigned to (hard or pseudo) register n. */ |
230d793d RS |
265 | |
266 | static rtx *reg_last_set_value; | |
267 | ||
268 | /* Record the value of label_tick when the value for register n is placed in | |
269 | reg_last_set_value[n]. */ | |
270 | ||
568356af | 271 | static int *reg_last_set_label; |
230d793d RS |
272 | |
273 | /* Record the value of label_tick when an expression involving register n | |
0f41302f | 274 | is placed in reg_last_set_value. */ |
230d793d | 275 | |
568356af | 276 | static int *reg_last_set_table_tick; |
230d793d RS |
277 | |
278 | /* Set non-zero if references to register n in expressions should not be | |
279 | used. */ | |
280 | ||
281 | static char *reg_last_set_invalid; | |
282 | ||
0f41302f | 283 | /* Incremented for each label. */ |
230d793d | 284 | |
568356af | 285 | static int label_tick; |
230d793d RS |
286 | |
287 | /* Some registers that are set more than once and used in more than one | |
288 | basic block are nevertheless always set in similar ways. For example, | |
289 | a QImode register may be loaded from memory in two places on a machine | |
290 | where byte loads zero extend. | |
291 | ||
951553af | 292 | We record in the following array what we know about the nonzero |
230d793d RS |
293 | bits of a register, specifically which bits are known to be zero. |
294 | ||
295 | If an entry is zero, it means that we don't know anything special. */ | |
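For instance (example ours), on a machine whose QImode loads zero extend, an SImode pseudo that is set only by such loads in two different blocks gets reg_nonzero_bits[n] == 0xff: whichever set reaches a given use, bits 8 and up are known to be zero.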
296 | ||
55310dad | 297 | static unsigned HOST_WIDE_INT *reg_nonzero_bits; |
230d793d | 298 | |
951553af | 299 | /* Mode used to compute significance in reg_nonzero_bits. It is the largest |
5f4f0e22 | 300 | integer mode that can fit in HOST_BITS_PER_WIDE_INT. */ |
230d793d | 301 | |
951553af | 302 | static enum machine_mode nonzero_bits_mode; |
230d793d | 303 | |
d0ab8cd3 RK |
304 | /* Nonzero if we know that a register has some leading bits that are always |
305 | equal to the sign bit. */ | |
306 | ||
770ae6cc | 307 | static unsigned char *reg_sign_bit_copies; |
d0ab8cd3 | 308 | |
951553af | 309 | /* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used. |
1a26b032 RK |
310 | It is zero while computing them and after combine has completed. The |
311 | former case prevents propagating values based on previously set values, |
312 | which can be incorrect if a variable is modified in a loop. */ | |
230d793d | 313 | |
951553af | 314 | static int nonzero_sign_valid; |
55310dad RK |
315 | |
316 | /* These arrays are maintained in parallel with reg_last_set_value | |
317 | and are used to store the mode in which the register was last set, | |
318 | the bits that were known to be zero when it was last set, and the | |
319 | number of sign bits copies it was known to have when it was last set. */ | |
320 | ||
321 | static enum machine_mode *reg_last_set_mode; | |
322 | static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits; | |
323 | static char *reg_last_set_sign_bit_copies; | |
230d793d RS |
324 | \f |
325 | /* Record one modification to rtl structure | |
326 | to be undone by storing old_contents into *where. | |
327 | is_int is 1 if the contents are an int. */ | |
328 | ||
329 | struct undo | |
330 | { | |
241cea85 | 331 | struct undo *next; |
230d793d | 332 | int is_int; |
f5393ab9 RS |
333 | union {rtx r; int i;} old_contents; |
334 | union {rtx *r; int *i;} where; | |
230d793d RS |
335 | }; |
336 | ||
337 | /* Record a bunch of changes to be undone, chained through the undos |
338 | field below; entries on the frees list can be reused by do_SUBST. |
339 | ||
340 | storage is nonzero if we must undo the allocation of new storage. | |
341 | The value of storage is what to pass to obfree. | |
342 | ||
343 | other_insn is nonzero if we have modified some other insn in the process | |
241cea85 | 344 | of working on subst_insn. It must be verified too. |
230d793d | 345 | |
241cea85 RK |
346 | previous_undos is the value of undobuf.undos when we started processing |
347 | this substitution. This will prevent gen_rtx_combine from re-using a piece |
348 | from the previous expression. Doing so can produce circular rtl | |
349 | structures. */ | |
230d793d RS |
350 | |
351 | struct undobuf | |
352 | { | |
230d793d | 353 | char *storage; |
241cea85 RK |
354 | struct undo *undos; |
355 | struct undo *frees; | |
356 | struct undo *previous_undos; | |
230d793d RS |
357 | rtx other_insn; |
358 | }; | |
359 | ||
360 | static struct undobuf undobuf; | |
361 | ||
230d793d RS |
362 | /* Number of times the pseudo being substituted for |
363 | was found and replaced. */ | |
364 | ||
365 | static int n_occurrences; | |
366 | ||
83d2b3b9 KG |
367 | static void do_SUBST PARAMS ((rtx *, rtx)); |
368 | static void do_SUBST_INT PARAMS ((int *, int)); | |
369 | static void init_reg_last_arrays PARAMS ((void)); | |
370 | static void setup_incoming_promotions PARAMS ((void)); | |
371 | static void set_nonzero_bits_and_sign_copies PARAMS ((rtx, rtx, void *)); | |
372 | static int can_combine_p PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *)); | |
373 | static int sets_function_arg_p PARAMS ((rtx)); | |
374 | static int combinable_i3pat PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *)); | |
375 | static int contains_muldiv PARAMS ((rtx)); | |
44a76fc8 | 376 | static rtx try_combine PARAMS ((rtx, rtx, rtx, int *)); |
83d2b3b9 KG |
377 | static void undo_all PARAMS ((void)); |
378 | static void undo_commit PARAMS ((void)); | |
379 | static rtx *find_split_point PARAMS ((rtx *, rtx)); | |
380 | static rtx subst PARAMS ((rtx, rtx, rtx, int, int)); | |
381 | static rtx combine_simplify_rtx PARAMS ((rtx, enum machine_mode, int, int)); | |
382 | static rtx simplify_if_then_else PARAMS ((rtx)); | |
383 | static rtx simplify_set PARAMS ((rtx)); | |
384 | static rtx simplify_logical PARAMS ((rtx, int)); | |
385 | static rtx expand_compound_operation PARAMS ((rtx)); | |
386 | static rtx expand_field_assignment PARAMS ((rtx)); | |
770ae6cc RK |
387 | static rtx make_extraction PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT, |
388 | rtx, unsigned HOST_WIDE_INT, int, | |
389 | int, int)); | |
83d2b3b9 KG |
390 | static rtx extract_left_shift PARAMS ((rtx, int)); |
391 | static rtx make_compound_operation PARAMS ((rtx, enum rtx_code)); | |
770ae6cc RK |
392 | static int get_pos_from_mask PARAMS ((unsigned HOST_WIDE_INT, |
393 | unsigned HOST_WIDE_INT *)); | |
83d2b3b9 KG |
394 | static rtx force_to_mode PARAMS ((rtx, enum machine_mode, |
395 | unsigned HOST_WIDE_INT, rtx, int)); | |
396 | static rtx if_then_else_cond PARAMS ((rtx, rtx *, rtx *)); | |
397 | static rtx known_cond PARAMS ((rtx, enum rtx_code, rtx, rtx)); | |
398 | static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx)); | |
399 | static rtx make_field_assignment PARAMS ((rtx)); | |
400 | static rtx apply_distributive_law PARAMS ((rtx)); | |
401 | static rtx simplify_and_const_int PARAMS ((rtx, enum machine_mode, rtx, | |
402 | unsigned HOST_WIDE_INT)); | |
403 | static unsigned HOST_WIDE_INT nonzero_bits PARAMS ((rtx, enum machine_mode)); | |
770ae6cc | 404 | static unsigned int num_sign_bit_copies PARAMS ((rtx, enum machine_mode)); |
83d2b3b9 KG |
405 | static int merge_outer_ops PARAMS ((enum rtx_code *, HOST_WIDE_INT *, |
406 | enum rtx_code, HOST_WIDE_INT, | |
407 | enum machine_mode, int *)); | |
408 | static rtx simplify_shift_const PARAMS ((rtx, enum rtx_code, enum machine_mode, | |
409 | rtx, int)); | |
410 | static int recog_for_combine PARAMS ((rtx *, rtx, rtx *)); | |
411 | static rtx gen_lowpart_for_combine PARAMS ((enum machine_mode, rtx)); | |
412 | static rtx gen_rtx_combine PARAMS ((enum rtx_code code, enum machine_mode mode, | |
413 | ...)); | |
414 | static rtx gen_binary PARAMS ((enum rtx_code, enum machine_mode, | |
415 | rtx, rtx)); | |
416 | static rtx gen_unary PARAMS ((enum rtx_code, enum machine_mode, | |
417 | enum machine_mode, rtx)); | |
418 | static enum rtx_code simplify_comparison PARAMS ((enum rtx_code, rtx *, rtx *)); | |
419 | static int reversible_comparison_p PARAMS ((rtx)); | |
420 | static void update_table_tick PARAMS ((rtx)); | |
421 | static void record_value_for_reg PARAMS ((rtx, rtx, rtx)); | |
422 | static void check_promoted_subreg PARAMS ((rtx, rtx)); | |
423 | static void record_dead_and_set_regs_1 PARAMS ((rtx, rtx, void *)); | |
424 | static void record_dead_and_set_regs PARAMS ((rtx)); | |
425 | static int get_last_value_validate PARAMS ((rtx *, rtx, int, int)); | |
426 | static rtx get_last_value PARAMS ((rtx)); | |
427 | static int use_crosses_set_p PARAMS ((rtx, int)); | |
428 | static void reg_dead_at_p_1 PARAMS ((rtx, rtx, void *)); | |
429 | static int reg_dead_at_p PARAMS ((rtx, rtx)); | |
430 | static void move_deaths PARAMS ((rtx, rtx, int, rtx, rtx *)); | |
431 | static int reg_bitfield_target_p PARAMS ((rtx, rtx)); | |
432 | static void distribute_notes PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx)); | |
433 | static void distribute_links PARAMS ((rtx)); | |
434 | static void mark_used_regs_combine PARAMS ((rtx)); | |
435 | static int insn_cuid PARAMS ((rtx)); | |
c6991660 | 436 | static void record_promoted_value PARAMS ((rtx, rtx)); |
230d793d | 437 | \f |
76095e2f RH |
438 | /* Substitute NEWVAL, an rtx expression, into INTO, a place in some |
439 | insn. The substitution can be undone by undo_all. If INTO is already | |
440 | set to NEWVAL, do not record this change. Because computing NEWVAL might | |
441 | also call SUBST, we have to compute it before we put anything into | |
442 | the undo table. */ | |
443 | ||
444 | static void | |
663522cb | 445 | do_SUBST (into, newval) |
76095e2f RH |
446 | rtx *into, newval; |
447 | { | |
448 | struct undo *buf; | |
449 | rtx oldval = *into; | |
450 | ||
451 | if (oldval == newval) | |
452 | return; | |
453 | ||
454 | if (undobuf.frees) | |
455 | buf = undobuf.frees, undobuf.frees = buf->next; | |
456 | else | |
457 | buf = (struct undo *) xmalloc (sizeof (struct undo)); | |
458 | ||
459 | buf->is_int = 0; | |
460 | buf->where.r = into; | |
461 | buf->old_contents.r = oldval; | |
462 | *into = newval; | |
463 | ||
464 | buf->next = undobuf.undos, undobuf.undos = buf; | |
465 | } | |
466 | ||
467 | #define SUBST(INTO, NEWVAL) do_SUBST(&(INTO), (NEWVAL)) | |
468 | ||
469 | /* Similar to SUBST, but NEWVAL is an int expression. Note that substituting | |
470 | for the value of a HOST_WIDE_INT (including a CONST_INT) is | |
471 | not safe. */ | |
472 | ||
473 | static void | |
663522cb | 474 | do_SUBST_INT (into, newval) |
76095e2f RH |
475 | int *into, newval; |
476 | { | |
477 | struct undo *buf; | |
478 | int oldval = *into; | |
479 | ||
480 | if (oldval == newval) | |
481 | return; | |
482 | ||
483 | if (undobuf.frees) | |
484 | buf = undobuf.frees, undobuf.frees = buf->next; | |
485 | else | |
486 | buf = (struct undo *) xmalloc (sizeof (struct undo)); | |
487 | ||
488 | buf->is_int = 1; | |
489 | buf->where.i = into; | |
490 | buf->old_contents.i = oldval; | |
491 | *into = newval; | |
492 | ||
493 | buf->next = undobuf.undos, undobuf.undos = buf; | |
494 | } | |
495 | ||
496 | #define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL)) | |
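A sketch (ours) of how these macros are typically used during substitution; `x' and `new_src' are hypothetical locals:

     /* Tentatively rewrite the source of a SET.  do_SUBST records the
        old rtx on undobuf.undos before storing the new one.  */
     SUBST (SET_SRC (x), new_src);

     /* If the rewritten pattern fails to match, every recorded change
        is rolled back.  */
     if (! valid)
       undo_all ();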
497 | \f | |
230d793d | 498 | /* Main entry point for combiner. F is the first insn of the function. |
663522cb | 499 | NREGS is the first unused pseudo-reg number. |
230d793d | 500 | |
44a76fc8 AG |
501 | Return non-zero if the combiner has turned an indirect jump |
502 | instruction into a direct jump. */ | |
503 | int | |
230d793d RS |
504 | combine_instructions (f, nregs) |
505 | rtx f; | |
770ae6cc | 506 | unsigned int nregs; |
230d793d | 507 | { |
b729186a JL |
508 | register rtx insn, next; |
509 | #ifdef HAVE_cc0 | |
510 | register rtx prev; | |
511 | #endif | |
230d793d RS |
512 | register int i; |
513 | register rtx links, nextlinks; | |
514 | ||
44a76fc8 AG |
515 | int new_direct_jump_p = 0; |
516 | ||
230d793d RS |
517 | combine_attempts = 0; |
518 | combine_merges = 0; | |
519 | combine_extras = 0; | |
520 | combine_successes = 0; | |
521 | ||
522 | combine_max_regno = nregs; | |
523 | ||
663522cb | 524 | reg_nonzero_bits = ((unsigned HOST_WIDE_INT *) |
c05ddfa7 | 525 | xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT))); |
770ae6cc RK |
526 | reg_sign_bit_copies |
527 | = (unsigned char *) xcalloc (nregs, sizeof (unsigned char)); | |
c05ddfa7 MM |
528 | |
529 | reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx)); | |
530 | reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx)); | |
531 | reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx)); | |
532 | reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int)); | |
533 | reg_last_set_label = (int *) xmalloc (nregs * sizeof (int)); | |
534 | reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char)); | |
55310dad | 535 | reg_last_set_mode |
c05ddfa7 | 536 | = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode)); |
55310dad | 537 | reg_last_set_nonzero_bits |
c05ddfa7 | 538 | = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT)); |
55310dad | 539 | reg_last_set_sign_bit_copies |
c05ddfa7 | 540 | = (char *) xmalloc (nregs * sizeof (char)); |
55310dad | 541 | |
ef026f91 | 542 | init_reg_last_arrays (); |
230d793d RS |
543 | |
544 | init_recog_no_volatile (); | |
545 | ||
546 | /* Compute maximum uid value so uid_cuid can be allocated. */ | |
547 | ||
548 | for (insn = f, i = 0; insn; insn = NEXT_INSN (insn)) | |
549 | if (INSN_UID (insn) > i) | |
550 | i = INSN_UID (insn); | |
551 | ||
c05ddfa7 | 552 | uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int)); |
4255220d | 553 | max_uid_cuid = i; |
230d793d | 554 | |
951553af | 555 | nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0); |
230d793d | 556 | |
951553af | 557 | /* Don't use reg_nonzero_bits when computing it. This can cause problems |
230d793d RS |
558 | when, for example, we have j <<= 1 in a loop. */ |
559 | ||
951553af | 560 | nonzero_sign_valid = 0; |
230d793d RS |
561 | |
562 | /* Compute the mapping from uids to cuids. | |
563 | Cuids are numbers assigned to insns, like uids, | |
663522cb | 564 | except that cuids increase monotonically through the code. |
230d793d RS |
565 | |
566 | Scan all SETs and see if we can deduce anything about what | |
951553af | 567 | bits are known to be zero for some registers and how many copies |
d79f08e0 RK |
568 | of the sign bit are known to exist for those registers. |
569 | ||
570 | Also set any known values so that we can use it while searching | |
571 | for what bits are known to be set. */ | |
572 | ||
573 | label_tick = 1; | |
230d793d | 574 | |
bcd49eb7 JW |
575 | /* We need to initialize it here, because record_dead_and_set_regs may call |
576 | get_last_value. */ | |
577 | subst_prev_insn = NULL_RTX; | |
578 | ||
7988fd36 RK |
579 | setup_incoming_promotions (); |
580 | ||
715e7fbc RH |
581 | refresh_blocks = sbitmap_alloc (n_basic_blocks); |
582 | sbitmap_zero (refresh_blocks); | |
583 | need_refresh = 0; | |
584 | ||
230d793d RS |
585 | for (insn = f, i = 0; insn; insn = NEXT_INSN (insn)) |
586 | { | |
4255220d | 587 | uid_cuid[INSN_UID (insn)] = ++i; |
d79f08e0 RK |
588 | subst_low_cuid = i; |
589 | subst_insn = insn; | |
590 | ||
230d793d | 591 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') |
d79f08e0 | 592 | { |
663522cb | 593 | note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies, |
84832317 | 594 | NULL); |
d79f08e0 | 595 | record_dead_and_set_regs (insn); |
2dab894a RK |
596 | |
597 | #ifdef AUTO_INC_DEC | |
598 | for (links = REG_NOTES (insn); links; links = XEXP (links, 1)) | |
599 | if (REG_NOTE_KIND (links) == REG_INC) | |
84832317 MM |
600 | set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX, |
601 | NULL); | |
2dab894a | 602 | #endif |
d79f08e0 RK |
603 | } |
604 | ||
605 | if (GET_CODE (insn) == CODE_LABEL) | |
606 | label_tick++; | |
230d793d RS |
607 | } |
608 | ||
951553af | 609 | nonzero_sign_valid = 1; |
230d793d RS |
610 | |
611 | /* Now scan all the insns in forward order. */ | |
612 | ||
0d4d42c3 | 613 | this_basic_block = -1; |
230d793d RS |
614 | label_tick = 1; |
615 | last_call_cuid = 0; | |
616 | mem_last_set = 0; | |
ef026f91 | 617 | init_reg_last_arrays (); |
7988fd36 RK |
618 | setup_incoming_promotions (); |
619 | ||
230d793d RS |
620 | for (insn = f; insn; insn = next ? next : NEXT_INSN (insn)) |
621 | { | |
622 | next = 0; | |
623 | ||
0d4d42c3 | 624 | /* If INSN starts a new basic block, update our basic block number. */ |
f085c9cd | 625 | if (this_basic_block + 1 < n_basic_blocks |
3b413743 | 626 | && BLOCK_HEAD (this_basic_block + 1) == insn) |
0d4d42c3 RK |
627 | this_basic_block++; |
628 | ||
230d793d RS |
629 | if (GET_CODE (insn) == CODE_LABEL) |
630 | label_tick++; | |
631 | ||
0d4d42c3 | 632 | else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') |
230d793d | 633 | { |
732f2ac9 JJ |
634 | /* See if we know about function return values before this |
635 | insn based upon SUBREG flags. */ | |
636 | check_promoted_subreg (insn, PATTERN (insn)); | |
732f2ac9 | 637 | |
230d793d RS |
638 | /* Try this insn with each insn it links back to. */ |
639 | ||
640 | for (links = LOG_LINKS (insn); links; links = XEXP (links, 1)) | |
663522cb | 641 | if ((next = try_combine (insn, XEXP (links, 0), |
44a76fc8 | 642 | NULL_RTX, &new_direct_jump_p)) != 0) |
230d793d RS |
643 | goto retry; |
644 | ||
645 | /* Try each sequence of three linked insns ending with this one. */ | |
646 | ||
647 | for (links = LOG_LINKS (insn); links; links = XEXP (links, 1)) | |
aabb6c74 NC |
648 | { |
649 | rtx link = XEXP (links, 0); | |
650 | ||
651 | /* If the linked insn has been replaced by a note, then there | |
652 | is no point in pursuing this chain any further. */ |
653 | if (GET_CODE (link) == NOTE) | |
654 | break; | |
655 | ||
656 | for (nextlinks = LOG_LINKS (link); | |
657 | nextlinks; | |
658 | nextlinks = XEXP (nextlinks, 1)) | |
659 | if ((next = try_combine (insn, XEXP (links, 0), | |
865f50c5 RH |
660 | XEXP (nextlinks, 0), |
661 | &new_direct_jump_p)) != 0) | |
aabb6c74 NC |
662 | goto retry; |
663 | } | |
230d793d RS |
664 | |
665 | #ifdef HAVE_cc0 | |
666 | /* Try to combine a jump insn that uses CC0 | |
667 | with a preceding insn that sets CC0, and maybe with its | |
668 | logical predecessor as well. | |
669 | This is how we make decrement-and-branch insns. | |
670 | We need this special code because data flow connections | |
671 | via CC0 do not get entered in LOG_LINKS. */ | |
672 | ||
673 | if (GET_CODE (insn) == JUMP_INSN | |
674 | && (prev = prev_nonnote_insn (insn)) != 0 | |
675 | && GET_CODE (prev) == INSN | |
676 | && sets_cc0_p (PATTERN (prev))) | |
677 | { | |
663522cb | 678 | if ((next = try_combine (insn, prev, |
44a76fc8 | 679 | NULL_RTX, &new_direct_jump_p)) != 0) |
230d793d RS |
680 | goto retry; |
681 | ||
682 | for (nextlinks = LOG_LINKS (prev); nextlinks; | |
683 | nextlinks = XEXP (nextlinks, 1)) | |
684 | if ((next = try_combine (insn, prev, | |
44a76fc8 AG |
685 | XEXP (nextlinks, 0), |
686 | &new_direct_jump_p)) != 0) | |
230d793d RS |
687 | goto retry; |
688 | } | |
689 | ||
690 | /* Do the same for an insn that explicitly references CC0. */ | |
691 | if (GET_CODE (insn) == INSN | |
692 | && (prev = prev_nonnote_insn (insn)) != 0 | |
693 | && GET_CODE (prev) == INSN | |
694 | && sets_cc0_p (PATTERN (prev)) | |
695 | && GET_CODE (PATTERN (insn)) == SET | |
696 | && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn)))) | |
697 | { | |
663522cb | 698 | if ((next = try_combine (insn, prev, |
44a76fc8 | 699 | NULL_RTX, &new_direct_jump_p)) != 0) |
230d793d RS |
700 | goto retry; |
701 | ||
702 | for (nextlinks = LOG_LINKS (prev); nextlinks; | |
703 | nextlinks = XEXP (nextlinks, 1)) | |
704 | if ((next = try_combine (insn, prev, | |
44a76fc8 AG |
705 | XEXP (nextlinks, 0), |
706 | &new_direct_jump_p)) != 0) | |
230d793d RS |
707 | goto retry; |
708 | } | |
709 | ||
710 | /* Finally, see if any of the insns that this insn links to | |
711 | explicitly references CC0. If so, try this insn, that insn, | |
5089e22e | 712 | and its predecessor if it sets CC0. */ |
230d793d RS |
713 | for (links = LOG_LINKS (insn); links; links = XEXP (links, 1)) |
714 | if (GET_CODE (XEXP (links, 0)) == INSN | |
715 | && GET_CODE (PATTERN (XEXP (links, 0))) == SET | |
716 | && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0)))) | |
717 | && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0 | |
718 | && GET_CODE (prev) == INSN | |
719 | && sets_cc0_p (PATTERN (prev)) | |
663522cb | 720 | && (next = try_combine (insn, XEXP (links, 0), |
44a76fc8 | 721 | prev, &new_direct_jump_p)) != 0) |
230d793d RS |
722 | goto retry; |
723 | #endif | |
724 | ||
725 | /* Try combining an insn with two different insns whose results it | |
726 | uses. */ | |
727 | for (links = LOG_LINKS (insn); links; links = XEXP (links, 1)) | |
728 | for (nextlinks = XEXP (links, 1); nextlinks; | |
729 | nextlinks = XEXP (nextlinks, 1)) | |
730 | if ((next = try_combine (insn, XEXP (links, 0), | |
44a76fc8 AG |
731 | XEXP (nextlinks, 0), |
732 | &new_direct_jump_p)) != 0) | |
230d793d RS |
733 | goto retry; |
734 | ||
735 | if (GET_CODE (insn) != NOTE) | |
736 | record_dead_and_set_regs (insn); | |
737 | ||
738 | retry: | |
739 | ; | |
740 | } | |
741 | } | |
742 | ||
715e7fbc | 743 | if (need_refresh) |
49c3bb12 RH |
744 | { |
745 | compute_bb_for_insn (get_max_uid ()); | |
746 | update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES, | |
663522cb | 747 | PROP_DEATH_NOTES); |
49c3bb12 | 748 | } |
c05ddfa7 MM |
749 | |
750 | /* Clean up. */ | |
715e7fbc | 751 | sbitmap_free (refresh_blocks); |
c05ddfa7 MM |
752 | free (reg_nonzero_bits); |
753 | free (reg_sign_bit_copies); | |
754 | free (reg_last_death); | |
755 | free (reg_last_set); | |
756 | free (reg_last_set_value); | |
757 | free (reg_last_set_table_tick); | |
758 | free (reg_last_set_label); | |
759 | free (reg_last_set_invalid); | |
760 | free (reg_last_set_mode); | |
761 | free (reg_last_set_nonzero_bits); | |
762 | free (reg_last_set_sign_bit_copies); | |
763 | free (uid_cuid); | |
715e7fbc | 764 | |
e7749837 RH |
765 | { |
766 | struct undo *undo, *next; | |
767 | for (undo = undobuf.frees; undo; undo = next) | |
768 | { | |
769 | next = undo->next; | |
770 | free (undo); | |
771 | } | |
772 | undobuf.frees = 0; | |
773 | } | |
774 | ||
230d793d RS |
775 | total_attempts += combine_attempts; |
776 | total_merges += combine_merges; | |
777 | total_extras += combine_extras; | |
778 | total_successes += combine_successes; | |
1a26b032 | 779 | |
951553af | 780 | nonzero_sign_valid = 0; |
972b320c R |
781 | |
782 | /* Make recognizer allow volatile MEMs again. */ | |
783 | init_recog (); | |
44a76fc8 AG |
784 | |
785 | return new_direct_jump_p; | |
230d793d | 786 | } |
ef026f91 RS |
787 | |
788 | /* Wipe the reg_last_xxx arrays in preparation for another pass. */ | |
789 | ||
790 | static void | |
791 | init_reg_last_arrays () | |
792 | { | |
770ae6cc | 793 | unsigned int nregs = combine_max_regno; |
ef026f91 | 794 | |
4c9a05bc RK |
795 | bzero ((char *) reg_last_death, nregs * sizeof (rtx)); |
796 | bzero ((char *) reg_last_set, nregs * sizeof (rtx)); | |
797 | bzero ((char *) reg_last_set_value, nregs * sizeof (rtx)); | |
798 | bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int)); | |
799 | bzero ((char *) reg_last_set_label, nregs * sizeof (int)); | |
ef026f91 | 800 | bzero (reg_last_set_invalid, nregs * sizeof (char)); |
4c9a05bc RK |
801 | bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode)); |
802 | bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT)); | |
ef026f91 RS |
803 | bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char)); |
804 | } | |
230d793d | 805 | \f |
7988fd36 RK |
806 | /* Set up any promoted values for incoming argument registers. */ |
807 | ||
ee791cc3 | 808 | static void |
7988fd36 RK |
809 | setup_incoming_promotions () |
810 | { | |
811 | #ifdef PROMOTE_FUNCTION_ARGS | |
770ae6cc | 812 | unsigned int regno; |
7988fd36 RK |
813 | rtx reg; |
814 | enum machine_mode mode; | |
815 | int unsignedp; | |
816 | rtx first = get_insns (); | |
817 | ||
c285f57a JJ |
818 | #ifndef OUTGOING_REGNO |
819 | #define OUTGOING_REGNO(N) N | |
820 | #endif | |
7988fd36 | 821 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
c285f57a JJ |
822 | /* Check whether this register can hold an incoming pointer |
823 | argument. FUNCTION_ARG_REGNO_P tests outgoing register | |
824 | numbers, so translate if necessary due to register windows. */ | |
825 | if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno)) | |
7988fd36 | 826 | && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0) |
38a448ca RH |
827 | { |
828 | record_value_for_reg | |
829 | (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND | |
830 | : SIGN_EXTEND), | |
831 | GET_MODE (reg), | |
832 | gen_rtx_CLOBBER (mode, const0_rtx))); | |
833 | } | |
7988fd36 RK |
834 | #endif |
835 | } | |
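For an unsigned argument promoted from MODE, the value recorded above has the shape (zero_extend:WIDER (clobber:MODE (const_int 0))) (reading ours): the CLOBBER stands for an unknown MODE quantity, so only the fact of the zero- or sign-extension is usable knowledge.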
836 | \f | |
91102d5a RK |
837 | /* Called via note_stores. If X is a pseudo that is narrower than |
838 | HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero. | |
230d793d RS |
839 | |
840 | If we are setting only a portion of X and we can't figure out what | |
841 | portion, assume all bits will be used since we don't know what will | |
d0ab8cd3 RK |
842 | be happening. |
843 | ||
844 | Similarly, set how many bits of X are known to be copies of the sign bit | |
663522cb | 845 | at all locations in the function. This is the smallest number implied |
d0ab8cd3 | 846 | by any set of X. */ |
230d793d RS |
847 | |
848 | static void | |
84832317 | 849 | set_nonzero_bits_and_sign_copies (x, set, data) |
230d793d RS |
850 | rtx x; |
851 | rtx set; | |
84832317 | 852 | void *data ATTRIBUTE_UNUSED; |
230d793d | 853 | { |
770ae6cc | 854 | unsigned int num; |
d0ab8cd3 | 855 | |
230d793d RS |
856 | if (GET_CODE (x) == REG |
857 | && REGNO (x) >= FIRST_PSEUDO_REGISTER | |
e8095e80 RK |
858 | /* If this register is undefined at the start of the function, we can't |
859 | say what its contents were. */ | |
e881bb1b | 860 | && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x)) |
5f4f0e22 | 861 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT) |
230d793d | 862 | { |
2dab894a | 863 | if (set == 0 || GET_CODE (set) == CLOBBER) |
e8095e80 RK |
864 | { |
865 | reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x)); | |
88306d12 | 866 | reg_sign_bit_copies[REGNO (x)] = 1; |
e8095e80 RK |
867 | return; |
868 | } | |
230d793d RS |
869 | |
870 | /* If this is a complex assignment, see if we can convert it into a | |
5089e22e | 871 | simple assignment. */ |
230d793d | 872 | set = expand_field_assignment (set); |
d79f08e0 RK |
873 | |
874 | /* If this is a simple assignment, or we have a paradoxical SUBREG, | |
875 | set what we know about X. */ | |
876 | ||
877 | if (SET_DEST (set) == x | |
878 | || (GET_CODE (SET_DEST (set)) == SUBREG | |
705c7b3b JW |
879 | && (GET_MODE_SIZE (GET_MODE (SET_DEST (set))) |
880 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set))))) | |
d79f08e0 | 881 | && SUBREG_REG (SET_DEST (set)) == x)) |
d0ab8cd3 | 882 | { |
9afa3d54 RK |
883 | rtx src = SET_SRC (set); |
884 | ||
885 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND | |
886 | /* If X is narrower than a word and SRC is a non-negative | |
887 | constant that would appear negative in the mode of X, | |
888 | sign-extend it for use in reg_nonzero_bits because some | |
889 | machines (maybe most) will actually do the sign-extension | |
663522cb | 890 | and this is the conservative approach. |
9afa3d54 RK |
891 | |
892 | ??? For 2.5, try to tighten up the MD files in this regard | |
893 | instead of this kludge. */ | |
894 | ||
895 | if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD | |
896 | && GET_CODE (src) == CONST_INT | |
897 | && INTVAL (src) > 0 | |
898 | && 0 != (INTVAL (src) | |
899 | & ((HOST_WIDE_INT) 1 | |
9e69be8c | 900 | << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) |
9afa3d54 RK |
901 | src = GEN_INT (INTVAL (src) |
902 | | ((HOST_WIDE_INT) (-1) | |
903 | << GET_MODE_BITSIZE (GET_MODE (x)))); | |
904 | #endif | |
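	  /* Worked instance of the adjustment above (numbers ours): with X
	     in QImode and SRC = (const_int 200), bit 7 of 200 is set, so
	     SRC becomes 200 | ((HOST_WIDE_INT) -1 << 8) == -56 -- the
	     value a sign-extending load of the 8-bit immediate would
	     actually leave in the register.  */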
905 | ||
951553af | 906 | reg_nonzero_bits[REGNO (x)] |
9afa3d54 | 907 | |= nonzero_bits (src, nonzero_bits_mode); |
d0ab8cd3 RK |
908 | num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x)); |
909 | if (reg_sign_bit_copies[REGNO (x)] == 0 | |
910 | || reg_sign_bit_copies[REGNO (x)] > num) | |
911 | reg_sign_bit_copies[REGNO (x)] = num; | |
912 | } | |
230d793d | 913 | else |
d0ab8cd3 | 914 | { |
951553af | 915 | reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x)); |
88306d12 | 916 | reg_sign_bit_copies[REGNO (x)] = 1; |
d0ab8cd3 | 917 | } |
230d793d RS |
918 | } |
919 | } | |
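An example of the sign-bit bookkeeping (ours): after

     (set (reg:SI 100) (ashiftrt:SI (reg:SI 99) (const_int 24)))

the top 25 bits of reg 100 all equal its sign bit, so num_sign_bit_copies of the source is at least 25 and reg_sign_bit_copies[100] records it, kept at the minimum over all sets of reg 100 as described above.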
920 | \f | |
921 | /* See if INSN can be combined into I3. PRED and SUCC are optionally | |
922 | insns that were previously combined into I3 or that will be combined | |
923 | into the merger of INSN and I3. | |
924 | ||
925 | Return 0 if the combination is not allowed for any reason. | |
926 | ||
663522cb | 927 | If the combination is allowed, *PDEST will be set to the single |
230d793d RS |
928 | destination of INSN and *PSRC to the single source, and this function |
929 | will return 1. */ | |
930 | ||
931 | static int | |
932 | can_combine_p (insn, i3, pred, succ, pdest, psrc) | |
933 | rtx insn; | |
934 | rtx i3; | |
e51712db KG |
935 | rtx pred ATTRIBUTE_UNUSED; |
936 | rtx succ; | |
230d793d RS |
937 | rtx *pdest, *psrc; |
938 | { | |
939 | int i; | |
940 | rtx set = 0, src, dest; | |
b729186a JL |
941 | rtx p; |
942 | #ifdef AUTO_INC_DEC | |
76d31c63 | 943 | rtx link; |
b729186a | 944 | #endif |
230d793d RS |
945 | int all_adjacent = (succ ? (next_active_insn (insn) == succ |
946 | && next_active_insn (succ) == i3) | |
947 | : next_active_insn (insn) == i3); | |
948 | ||
949 | /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0, |
663522cb | 950 | or a PARALLEL consisting of such a SET and CLOBBERs. |
230d793d RS |
951 | |
952 | If INSN has CLOBBER parallel parts, ignore them for our processing. | |
953 | By definition, these happen during the execution of the insn. When it | |
954 | is merged with another insn, all bets are off. If they are, in fact, | |
955 | needed and aren't also supplied in I3, they may be added by | |
663522cb | 956 | recog_for_combine. Otherwise, it won't match. |
230d793d RS |
957 | |
958 | We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED | |
959 | note. | |
960 | ||
663522cb | 961 | Get the source and destination of INSN. If more than one, can't |
230d793d | 962 | combine. */ |
663522cb | 963 | |
230d793d RS |
964 | if (GET_CODE (PATTERN (insn)) == SET) |
965 | set = PATTERN (insn); | |
966 | else if (GET_CODE (PATTERN (insn)) == PARALLEL | |
967 | && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) | |
968 | { | |
969 | for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++) | |
970 | { | |
971 | rtx elt = XVECEXP (PATTERN (insn), 0, i); | |
972 | ||
973 | switch (GET_CODE (elt)) | |
974 | { | |
e3258cef R |
975 | /* This is important to combine floating point insns |
976 | for the SH4 port. */ | |
977 | case USE: | |
978 | /* Combining an isolated USE doesn't make sense. | |
979 | We depend here on combinable_i3pat to reject them. */ |
980 | /* The code below this loop only verifies that the inputs of | |
981 | the SET in INSN do not change. We call reg_set_between_p | |
982 | to verify that the REG in the USE does not change between |
983 | I3 and INSN. | |
984 | If the USE in INSN was for a pseudo register, the matching | |
985 | insn pattern will likely match any register; combining this | |
986 | with any other USE would only be safe if we knew that the | |
987 | used registers have identical values, or if there was | |
988 | something to tell them apart, e.g. different modes. For | |
990 | now, we forgo such complicated tests and simply disallow |
990 | combining of USES of pseudo registers with any other USE. */ | |
991 | if (GET_CODE (XEXP (elt, 0)) == REG | |
992 | && GET_CODE (PATTERN (i3)) == PARALLEL) | |
993 | { | |
994 | rtx i3pat = PATTERN (i3); | |
995 | int i = XVECLEN (i3pat, 0) - 1; | |
770ae6cc RK |
996 | unsigned int regno = REGNO (XEXP (elt, 0)); |
997 | ||
e3258cef R |
998 | do |
999 | { | |
1000 | rtx i3elt = XVECEXP (i3pat, 0, i); | |
770ae6cc | 1001 | |
e3258cef R |
1002 | if (GET_CODE (i3elt) == USE |
1003 | && GET_CODE (XEXP (i3elt, 0)) == REG | |
1004 | && (REGNO (XEXP (i3elt, 0)) == regno | |
1005 | ? reg_set_between_p (XEXP (elt, 0), | |
1006 | PREV_INSN (insn), i3) | |
1007 | : regno >= FIRST_PSEUDO_REGISTER)) | |
1008 | return 0; | |
1009 | } | |
1010 | while (--i >= 0); | |
1011 | } | |
1012 | break; | |
1013 | ||
230d793d RS |
1014 | /* We can ignore CLOBBERs. */ |
1015 | case CLOBBER: | |
1016 | break; | |
1017 | ||
1018 | case SET: | |
1019 | /* Ignore SETs whose result isn't used but not those that | |
1020 | have side-effects. */ | |
1021 | if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt)) | |
1022 | && ! side_effects_p (elt)) | |
1023 | break; | |
1024 | ||
1025 | /* If we have already found a SET, this is a second one and | |
1026 | so we cannot combine with this insn. */ | |
1027 | if (set) | |
1028 | return 0; | |
1029 | ||
1030 | set = elt; | |
1031 | break; | |
1032 | ||
1033 | default: | |
1034 | /* Anything else means we can't combine. */ | |
1035 | return 0; | |
1036 | } | |
1037 | } | |
1038 | ||
1039 | if (set == 0 | |
1040 | /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs, | |
1041 | so don't do anything with it. */ | |
1042 | || GET_CODE (SET_SRC (set)) == ASM_OPERANDS) | |
1043 | return 0; | |
1044 | } | |
1045 | else | |
1046 | return 0; | |
1047 | ||
1048 | if (set == 0) | |
1049 | return 0; | |
1050 | ||
1051 | set = expand_field_assignment (set); | |
1052 | src = SET_SRC (set), dest = SET_DEST (set); | |
1053 | ||
1054 | /* Don't eliminate a store in the stack pointer. */ | |
1055 | if (dest == stack_pointer_rtx | |
230d793d RS |
1056 | /* If we couldn't eliminate a field assignment, we can't combine. */ |
1057 | || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART | |
1058 | /* Don't combine with an insn that sets a register to itself if it has | |
1059 | a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */ | |
5f4f0e22 | 1060 | || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX)) |
230d793d RS |
1061 | /* Can't merge a function call. */ |
1062 | || GET_CODE (src) == CALL | |
cd5e8f1f | 1063 | /* Don't eliminate a function call argument. */ |
4dca5ec5 RK |
1064 | || (GET_CODE (i3) == CALL_INSN |
1065 | && (find_reg_fusage (i3, USE, dest) | |
1066 | || (GET_CODE (dest) == REG | |
1067 | && REGNO (dest) < FIRST_PSEUDO_REGISTER | |
1068 | && global_regs[REGNO (dest)]))) | |
230d793d RS |
1069 | /* Don't substitute into an incremented register. */ |
1070 | || FIND_REG_INC_NOTE (i3, dest) | |
1071 | || (succ && FIND_REG_INC_NOTE (succ, dest)) | |
ec35104c | 1072 | #if 0 |
230d793d | 1073 | /* Don't combine the end of a libcall into anything. */ |
ec35104c JL |
1074 | /* ??? This gives worse code, and appears to be unnecessary, since no |
1075 | pass after flow uses REG_LIBCALL/REG_RETVAL notes. Local-alloc does | |
1076 | use REG_RETVAL notes for noconflict blocks, but other code here | |
1077 | makes sure that those insns don't disappear. */ | |
5f4f0e22 | 1078 | || find_reg_note (insn, REG_RETVAL, NULL_RTX) |
ec35104c | 1079 | #endif |
230d793d RS |
1080 | /* Make sure that DEST is not used after SUCC but before I3. */ |
1081 | || (succ && ! all_adjacent | |
1082 | && reg_used_between_p (dest, succ, i3)) | |
1083 | /* Make sure that the value that is to be substituted for the register | |
1084 | does not use any registers whose values alter in between. However, | |
1085 | if the insns are adjacent, a use can't cross a set even though we |
1086 | think it might (this can happen for a sequence of insns each setting | |
1087 | the same destination; reg_last_set of that register might point to | |
d81481d3 RK |
1088 | a NOTE). If INSN has a REG_EQUIV note, the register is always |
1089 | equivalent to the memory so the substitution is valid even if there | |
1090 | are intervening stores. Also, don't move a volatile asm or | |
1091 | UNSPEC_VOLATILE across any other insns. */ | |
230d793d | 1092 | || (! all_adjacent |
d81481d3 RK |
1093 | && (((GET_CODE (src) != MEM |
1094 | || ! find_reg_note (insn, REG_EQUIV, src)) | |
1095 | && use_crosses_set_p (src, INSN_CUID (insn))) | |
a66a10c7 RS |
1096 | || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src)) |
1097 | || GET_CODE (src) == UNSPEC_VOLATILE)) | |
230d793d RS |
1098 | /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get |
1099 | better register allocation by not doing the combine. */ | |
1100 | || find_reg_note (i3, REG_NO_CONFLICT, dest) | |
1101 | || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest)) | |
1102 | /* Don't combine across a CALL_INSN, because that would possibly | |
1103 | change whether the life span of some REGs crosses calls or not, | |
1104 | and it is a pain to update that information. | |
1105 | Exception: if source is a constant, moving it later can't hurt. | |
1106 | Accept that special case, because it helps -fforce-addr a lot. */ | |
1107 | || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src))) | |
1108 | return 0; | |
1109 | ||
1110 | /* DEST must either be a REG or CC0. */ | |
1111 | if (GET_CODE (dest) == REG) | |
1112 | { | |
1113 | /* If register alignment is being enforced for multi-word items in all | |
1114 | cases except for parameters, it is possible to have a register copy | |
1115 | insn referencing a hard register that is not allowed to contain the | |
1116 | mode being copied and which would not be valid as an operand of most | |
1117 | insns. Eliminate this problem by not combining with such an insn. | |
1118 | ||
1119 | Also, on some machines we don't want to extend the life of a hard | |
4d2c432d RK |
1120 | register. |
1121 | ||
1122 | This is the same test done in can_combine_p except that we don't test |
1123 | if SRC is a CALL operation to permit a hard register with | |
1124 | SMALL_REGISTER_CLASSES, and that we have to take all_adjacent | |
1125 | into account. */ | |
230d793d RS |
1126 | |
1127 | if (GET_CODE (src) == REG | |
1128 | && ((REGNO (dest) < FIRST_PSEUDO_REGISTER | |
1129 | && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest))) | |
c448a43e RK |
1130 | /* Don't extend the life of a hard register unless it is |
1131 | user variable (if we have few registers) or it can't | |
1132 | fit into the desired register (meaning something special | |
ecd40809 RK |
1133 | is going on). |
1134 | Also avoid substituting a return register into I3, because | |
1135 | reload can't handle a conflict with constraints of other | |
1136 | inputs. */ | |
230d793d | 1137 | || (REGNO (src) < FIRST_PSEUDO_REGISTER |
c448a43e | 1138 | && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)) |
f95182a4 ILT |
1139 | || (SMALL_REGISTER_CLASSES |
1140 | && ((! all_adjacent && ! REG_USERVAR_P (src)) | |
1141 | || (FUNCTION_VALUE_REGNO_P (REGNO (src)) | |
e9a25f70 | 1142 | && ! REG_USERVAR_P (src)))))))) |
230d793d RS |
1143 | return 0; |
1144 | } | |
1145 | else if (GET_CODE (dest) != CC0) | |
1146 | return 0; | |
1147 | ||
5f96750d RS |
1148 | /* Don't substitute for a register intended as a clobberable operand. |
1149 | Similarly, don't substitute an expression containing a register that | |
1150 | will be clobbered in I3. */ | |
230d793d RS |
1151 | if (GET_CODE (PATTERN (i3)) == PARALLEL) |
1152 | for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--) | |
1153 | if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER | |
5f96750d RS |
1154 | && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), |
1155 | src) | |
1156 | || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest))) | |
230d793d RS |
1157 | return 0; |
1158 | ||
1159 | /* If INSN contains anything volatile, or is an `asm' (whether volatile | |
d276f2bb | 1160 | or not), reject, unless nothing volatile comes between it and I3. */ |
230d793d RS |
1161 | |
1162 | if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src)) | |
d276f2bb CM |
1163 | { |
1164 | /* Make sure succ doesn't contain a volatile reference. */ | |
1165 | if (succ != 0 && volatile_refs_p (PATTERN (succ))) | |
1166 | return 0; | |
663522cb | 1167 | |
d276f2bb CM |
1168 | for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p)) |
1169 | if (GET_RTX_CLASS (GET_CODE (p)) == 'i' | |
1170 | && p != succ && volatile_refs_p (PATTERN (p))) | |
1171 | return 0; | |
1172 | } | |
230d793d | 1173 | |
b79ee7eb RH |
1174 | /* If INSN is an asm, and DEST is a hard register, reject, since it has |
1175 | to be an explicit register variable, and was chosen for a reason. */ | |
1176 | ||
1177 | if (GET_CODE (src) == ASM_OPERANDS | |
1178 | && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER) | |
1179 | return 0; | |
1180 | ||
4b2cb4a2 RS |
1181 | /* If there are any volatile insns between INSN and I3, reject, because |
1182 | they might affect machine state. */ | |
1183 | ||
1184 | for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p)) | |
1185 | if (GET_RTX_CLASS (GET_CODE (p)) == 'i' | |
1186 | && p != succ && volatile_insn_p (PATTERN (p))) | |
1187 | return 0; | |
1188 | ||
230d793d RS |
1189 | /* If INSN or I2 contains an autoincrement or autodecrement, |
1190 | make sure that register is not used between there and I3, | |
1191 | and not already used in I3 either. | |
1192 | Also insist that I3 not be a jump; if it were one | |
1193 | and the incremented register were spilled, we would lose. */ | |
1194 | ||
1195 | #ifdef AUTO_INC_DEC | |
1196 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1197 | if (REG_NOTE_KIND (link) == REG_INC | |
1198 | && (GET_CODE (i3) == JUMP_INSN | |
1199 | || reg_used_between_p (XEXP (link, 0), insn, i3) | |
1200 | || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3)))) | |
1201 | return 0; | |
1202 | #endif | |
1203 | ||
1204 | #ifdef HAVE_cc0 | |
1205 | /* Don't combine an insn that follows a CC0-setting insn. | |
1206 | An insn that uses CC0 must not be separated from the one that sets it. | |
1207 | We do, however, allow I2 to follow a CC0-setting insn if that insn | |
1208 | is passed as I1; in that case it will be deleted also. | |
1209 | We also allow combining in this case if all the insns are adjacent | |
1210 | because that would leave the two CC0 insns adjacent as well. | |
1211 | It would be more logical to test whether CC0 occurs inside I1 or I2, | |
1212 | but that would be much slower, and this ought to be equivalent. */ | |
1213 | ||
1214 | p = prev_nonnote_insn (insn); | |
1215 | if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p)) | |
1216 | && ! all_adjacent) | |
1217 | return 0; | |
1218 | #endif | |
1219 | ||
1220 | /* If we get here, we have passed all the tests and the combination is | |
1221 | to be allowed. */ | |
1222 | ||
1223 | *pdest = dest; | |
1224 | *psrc = src; | |
1225 | ||
1226 | return 1; | |
1227 | } | |
1228 | \f | |
956d6950 JL |
1229 | /* Check if PAT is an insn - or a part of it - used to set up an |
1230 | argument for a function in a hard register. */ | |
1231 | ||
1232 | static int | |
1233 | sets_function_arg_p (pat) | |
1234 | rtx pat; | |
1235 | { | |
1236 | int i; | |
1237 | rtx inner_dest; | |
1238 | ||
1239 | switch (GET_CODE (pat)) | |
1240 | { | |
1241 | case INSN: | |
1242 | return sets_function_arg_p (PATTERN (pat)); | |
1243 | ||
1244 | case PARALLEL: | |
1245 | for (i = XVECLEN (pat, 0); --i >= 0;) | |
1246 | if (sets_function_arg_p (XVECEXP (pat, 0, i))) | |
1247 | return 1; | |
1248 | ||
1249 | break; | |
1250 | ||
1251 | case SET: | |
1252 | inner_dest = SET_DEST (pat); | |
1253 | while (GET_CODE (inner_dest) == STRICT_LOW_PART | |
1254 | || GET_CODE (inner_dest) == SUBREG | |
1255 | || GET_CODE (inner_dest) == ZERO_EXTRACT) | |
1256 | inner_dest = XEXP (inner_dest, 0); | |
1257 | ||
1258 | return (GET_CODE (inner_dest) == REG | |
1259 | && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER | |
1260 | && FUNCTION_ARG_REGNO_P (REGNO (inner_dest))); | |
1d300e19 KG |
1261 | |
1262 | default: | |
1263 | break; | |
956d6950 JL |
1264 | } |
1265 | ||
1266 | return 0; | |
1267 | } | |
1268 | ||
230d793d RS |
1269 | /* LOC is the location within I3 that contains its pattern or the component |
1270 | of a PARALLEL of the pattern. We verify that it is valid for combining. |
1271 | ||
1272 | One problem is if I3 modifies its output, as opposed to replacing it | |
1273 | entirely, we can't allow the output to contain I2DEST or I1DEST as doing | |
1274 | so would produce an insn that is not equivalent to the original insns. | |
1275 | ||
1276 | Consider: | |
1277 | ||
1278 | (set (reg:DI 101) (reg:DI 100)) | |
1279 | (set (subreg:SI (reg:DI 101) 0) <foo>) | |
1280 | ||
1281 | This is NOT equivalent to: | |
1282 | ||
1283 | (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>) | |
1284 | (set (reg:DI 101) (reg:DI 100))]) | |
1285 | ||
1286 | Not only does this modify 100 (in which case it might still be valid | |
663522cb | 1287 | if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100. |
230d793d RS |
1288 | |
1289 | We can also run into a problem if I2 sets a register that I1 | |
1290 | uses and I1 gets directly substituted into I3 (not via I2). In that | |
1291 | case, we would be getting the wrong value of I2DEST into I3, so we | |
1292 | must reject the combination. This case occurs when I2 and I1 both | |
1293 | feed into I3, rather than when I1 feeds into I2, which feeds into I3. | |
1294 | If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source | |
1295 | of a SET must prevent combination from occurring. | |
1296 | ||
e9a25f70 | 1297 | On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine |
c448a43e RK |
1298 | if the destination of a SET is a hard register that isn't a user |
1299 | variable. | |
230d793d RS |
1300 | |
1301 | Before doing the above check, we first try to expand a field assignment | |
1302 | into a set of logical operations. | |
1303 | ||
1304 | If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which | |
1305 | we place a register that is both set and used within I3. If more than one | |
1306 | such register is detected, we fail. | |
1307 | ||
1308 | Return 1 if the combination is valid, zero otherwise. */ | |
1309 | ||
1310 | static int | |
1311 | combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed) | |
1312 | rtx i3; | |
1313 | rtx *loc; | |
1314 | rtx i2dest; | |
1315 | rtx i1dest; | |
1316 | int i1_not_in_src; | |
1317 | rtx *pi3dest_killed; | |
1318 | { | |
1319 | rtx x = *loc; | |
1320 | ||
1321 | if (GET_CODE (x) == SET) | |
1322 | { | |
1323 | rtx set = expand_field_assignment (x); | |
1324 | rtx dest = SET_DEST (set); | |
1325 | rtx src = SET_SRC (set); | |
29a82058 | 1326 | rtx inner_dest = dest; |
663522cb | 1327 | |
29a82058 JL |
1328 | #if 0 |
1329 | rtx inner_src = src; | |
1330 | #endif | |
230d793d RS |
1331 | |
1332 | SUBST (*loc, set); | |
1333 | ||
1334 | while (GET_CODE (inner_dest) == STRICT_LOW_PART | |
1335 | || GET_CODE (inner_dest) == SUBREG | |
1336 | || GET_CODE (inner_dest) == ZERO_EXTRACT) | |
1337 | inner_dest = XEXP (inner_dest, 0); | |
1338 | ||
1339 | /* We probably don't need this any more now that LIMIT_RELOAD_CLASS | |
1340 | was added. */ | |
1341 | #if 0 | |
1342 | while (GET_CODE (inner_src) == STRICT_LOW_PART | |
1343 | || GET_CODE (inner_src) == SUBREG | |
1344 | || GET_CODE (inner_src) == ZERO_EXTRACT) | |
1345 | inner_src = XEXP (inner_src, 0); | |
1346 | ||
1347 | /* If it is better that two different modes keep two different pseudos, | |
1348 | avoid combining them. This avoids producing the following pattern | |
1349 | on a 386: | |
1350 | (set (subreg:SI (reg/v:QI 21) 0) | |
1351 | (lshiftrt:SI (reg/v:SI 20) | |
1352 | (const_int 24))) | |
1353 | If that were made, reload could not handle the pair of | |
1354 | reg 20/21, since it would try to get any GENERAL_REGS | |
1355 | but some of them don't handle QImode. */ | |
1356 | ||
1357 | if (rtx_equal_p (inner_src, i2dest) | |
1358 | && GET_CODE (inner_dest) == REG | |
1359 | && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest))) | |
1360 | return 0; | |
1361 | #endif | |
1362 | ||
1363 | /* Check for the case where I3 modifies its output, as | |
1364 | discussed above. */ | |
1365 | if ((inner_dest != dest | |
1366 | && (reg_overlap_mentioned_p (i2dest, inner_dest) | |
1367 | || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest)))) | |
956d6950 | 1368 | |
3f508eca RK |
1369 | /* This is the same test done in can_combine_p except that we |
1370 | allow a hard register with SMALL_REGISTER_CLASSES if SRC is a | |
956d6950 JL |
1371 | CALL operation. Moreover, we can't test all_adjacent; we don't |
1372 | have to, since this instruction will stay in place, thus we are | |
1373 | not considering increasing the lifetime of INNER_DEST. | |
1374 | ||
1375 | Also, if this insn sets a function argument, combining it with | |
1376 | something that might need a spill could clobber a previous | |
1377 | function argument; the all_adjacent test in can_combine_p also | |
1378 | checks this; here, we do a more specific test for this case. */ | |
663522cb | 1379 | |
230d793d | 1380 | || (GET_CODE (inner_dest) == REG |
dfbe1b2f | 1381 | && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER |
c448a43e RK |
1382 | && (! HARD_REGNO_MODE_OK (REGNO (inner_dest), |
1383 | GET_MODE (inner_dest)) | |
e9a25f70 JL |
1384 | || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL |
1385 | && ! REG_USERVAR_P (inner_dest) | |
956d6950 JL |
1386 | && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest)) |
1387 | || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest)) | |
1388 | && i3 != 0 | |
1389 | && sets_function_arg_p (prev_nonnote_insn (i3))))))) | |
230d793d RS |
1390 | || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src))) |
1391 | return 0; | |
1392 | ||
1393 | /* If DEST is used in I3, it is being killed in this insn, | |
663522cb | 1394 | so record that for later. |
36a9c2e9 JL |
1395 | Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the |
1396 | STACK_POINTER_REGNUM, since these are always considered to be | |
1397 | live. Similarly for ARG_POINTER_REGNUM if it is fixed. */ | |
230d793d | 1398 | if (pi3dest_killed && GET_CODE (dest) == REG |
36a9c2e9 JL |
1399 | && reg_referenced_p (dest, PATTERN (i3)) |
1400 | && REGNO (dest) != FRAME_POINTER_REGNUM | |
6d7096b0 DE |
1401 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
1402 | && REGNO (dest) != HARD_FRAME_POINTER_REGNUM | |
1403 | #endif | |
36a9c2e9 JL |
1404 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM |
1405 | && (REGNO (dest) != ARG_POINTER_REGNUM | |
1406 | || ! fixed_regs [REGNO (dest)]) | |
1407 | #endif | |
1408 | && REGNO (dest) != STACK_POINTER_REGNUM) | |
230d793d RS |
1409 | { |
1410 | if (*pi3dest_killed) | |
1411 | return 0; | |
1412 | ||
1413 | *pi3dest_killed = dest; | |
1414 | } | |
1415 | } | |
1416 | ||
1417 | else if (GET_CODE (x) == PARALLEL) | |
1418 | { | |
1419 | int i; | |
1420 | ||
1421 | for (i = 0; i < XVECLEN (x, 0); i++) | |
1422 | if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest, | |
1423 | i1_not_in_src, pi3dest_killed)) | |
1424 | return 0; | |
1425 | } | |
1426 | ||
1427 | return 1; | |
1428 | } | |
1429 | \f | |
14a774a9 RK |
1430 | /* Return 1 if X is an arithmetic expression that contains a multiplication |
1431 | or a division. We don't count multiplications by powers of two here. */ |
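/* For instance, (plus:SI (mult:SI (reg 100) (const_int 3)) (reg 101))
   contains a multiplication, but (mult:SI (reg 100) (const_int 4)) does
   not count, since a multiplication by a power of two will be rewritten
   as a shift.  */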
1432 | ||
1433 | static int | |
1434 | contains_muldiv (x) | |
1435 | rtx x; | |
1436 | { | |
1437 | switch (GET_CODE (x)) | |
1438 | { | |
1439 | case MOD: case DIV: case UMOD: case UDIV: | |
1440 | return 1; | |
1441 | ||
1442 | case MULT: | |
1443 | return ! (GET_CODE (XEXP (x, 1)) == CONST_INT | |
1444 | && exact_log2 (INTVAL (XEXP (x, 1))) >= 0); | |
1445 | default: | |
1446 | switch (GET_RTX_CLASS (GET_CODE (x))) | |
1447 | { | |
1448 | case 'c': case '<': case '2': | |
1449 | return contains_muldiv (XEXP (x, 0)) | |
1450 | || contains_muldiv (XEXP (x, 1)); | |
1451 | ||
1452 | case '1': | |
1453 | return contains_muldiv (XEXP (x, 0)); | |
1454 | ||
1455 | default: | |
1456 | return 0; | |
1457 | } | |
1458 | } | |
1459 | } | |
1460 | \f | |
230d793d RS |
1461 | /* Try to combine the insns I1 and I2 into I3. |
1462 | Here I1 and I2 appear earlier than I3. | |
1463 | I1 can be zero; then we combine just I2 into I3. | |
663522cb | 1464 | |
230d793d RS |
1465 | If we are combining three insns and the resulting insn is not recognized, |
1466 | try splitting it into two insns. If that happens, I2 and I3 are retained | |
1467 | and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2 | |
1468 | are pseudo-deleted. | |
1469 | ||
663522cb | 1470 | Return 0 if the combination does not work. Then nothing is changed. |
abe6e52f | 1471 | If we did the combination, return the insn at which combine should |
663522cb KH |
1472 | resume scanning. |
1473 | ||
44a76fc8 AG |
1474 | Set NEW_DIRECT_JUMP_P to a non-zero value if try_combine creates a |
1475 | new direct jump instruction. */ | |
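/* A typical two-insn combination looks like this (illustrative RTL;
   register numbers are arbitrary):
     I2: (set (reg 100) (plus (reg 101) (const_int 4)))
     I3: (set (mem (reg 100)) (const_int 0))
   If (reg 100) dies in I3, substitution produces
     I3: (set (mem (plus (reg 101) (const_int 4))) (const_int 0))
   and I2 is deleted, provided the result matches some insn pattern.  */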
230d793d RS |
1476 | |
1477 | static rtx | |
44a76fc8 | 1478 | try_combine (i3, i2, i1, new_direct_jump_p) |
230d793d | 1479 | register rtx i3, i2, i1; |
44a76fc8 | 1480 | register int *new_direct_jump_p; |
230d793d | 1481 | { |
02359929 | 1482 | /* New patterns for I3 and I2, respectively. */ |
230d793d RS |
1483 | rtx newpat, newi2pat = 0; |
1484 | /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */ | |
1485 | int added_sets_1, added_sets_2; | |
1486 | /* Total number of SETs to put into I3. */ | |
1487 | int total_sets; | |
1488 | /* Nonzero if I2's body now appears in I3. */ |
1489 | int i2_is_used; | |
1490 | /* INSN_CODEs for new I3, new I2, and user of condition code. */ | |
6a651371 | 1491 | int insn_code_number, i2_code_number = 0, other_code_number = 0; |
230d793d RS |
1492 | /* Contains I3 if the destination of I3 is used in its source, which means |
1493 | that the old life of I3 is being killed. If that usage is placed into | |
1494 | I2 and not in I3, a REG_DEAD note must be made. */ | |
1495 | rtx i3dest_killed = 0; | |
1496 | /* SET_DEST and SET_SRC of I2 and I1. */ | |
1497 | rtx i2dest, i2src, i1dest = 0, i1src = 0; | |
1498 | /* PATTERN (I2), or a copy of it in certain cases. */ | |
1499 | rtx i2pat; | |
1500 | /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC. */ |
c4e861e8 | 1501 | int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0; |
230d793d RS |
1502 | int i1_feeds_i3 = 0; |
1503 | /* Notes that must be added to REG_NOTES in I3 and I2. */ | |
1504 | rtx new_i3_notes, new_i2_notes; | |
176c9e6b JW |
1505 | /* Nonzero if we substituted I3 into I2 instead of the normal case. */ |
1506 | int i3_subst_into_i2 = 0; | |
df7d75de RK |
1507 | /* Nonzero if the source of I1, I2 or I3 is a MULT operation. */ |
1508 | int have_mult = 0; | |
230d793d RS |
1509 | |
1510 | int maxreg; | |
1511 | rtx temp; | |
1512 | register rtx link; | |
1513 | int i; | |
1514 | ||
1515 | /* If any of I1, I2, and I3 isn't really an insn, we can't do anything. | |
1516 | This can occur when flow deletes an insn that it has merged into an | |
1517 | auto-increment address. We also can't do anything if I3 has a | |
1518 | REG_LIBCALL note since we don't want to disrupt the contiguity of a | |
1519 | libcall. */ | |
1520 | ||
1521 | if (GET_RTX_CLASS (GET_CODE (i3)) != 'i' | |
1522 | || GET_RTX_CLASS (GET_CODE (i2)) != 'i' | |
1523 | || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i') | |
ec35104c JL |
1524 | #if 0 |
1525 | /* ??? This gives worse code, and appears to be unnecessary, since no | |
1526 | pass after flow uses REG_LIBCALL/REG_RETVAL notes. */ | |
1527 | || find_reg_note (i3, REG_LIBCALL, NULL_RTX) | |
1528 | #endif | |
663522cb | 1529 | ) |
230d793d RS |
1530 | return 0; |
1531 | ||
1532 | combine_attempts++; | |
230d793d RS |
1533 | undobuf.other_insn = 0; |
1534 | ||
1535 | /* Save the current high-water-mark so we can free storage if we didn't | |
1536 | accept this combination. */ | |
1537 | undobuf.storage = (char *) oballoc (0); | |
1538 | ||
6e25d159 RK |
1539 | /* Reset the hard register usage information. */ |
1540 | CLEAR_HARD_REG_SET (newpat_used_regs); | |
1541 | ||
230d793d RS |
1542 | /* If I1 and I2 both feed I3, they can be in any order. To simplify the |
1543 | code below, set I1 to be the earlier of the two insns. */ | |
1544 | if (i1 && INSN_CUID (i1) > INSN_CUID (i2)) | |
1545 | temp = i1, i1 = i2, i2 = temp; | |
1546 | ||
abe6e52f | 1547 | added_links_insn = 0; |
137e889e | 1548 | |
230d793d RS |
1549 | /* First check for one important special-case that the code below will |
1550 | not handle. Namely, the case where I1 is zero, I2 has multiple sets, | |
1551 | and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case, | |
1552 | we may be able to replace that destination with the destination of I3. | |
1553 | This occurs in the common code where we compute both a quotient and | |
1554 | remainder into a structure, in which case we want to do the computation | |
1555 | directly into the structure to avoid register-register copies. | |
1556 | ||
1557 | We make very conservative checks below and only try to handle the | |
1558 | most common cases of this. For example, we only handle the case | |
1559 | where I2 and I3 are adjacent to avoid making difficult register | |
1560 | usage tests. */ | |
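/* As a sketch (register numbers arbitrary), this handles
     I2: (parallel [(set (reg 100) (div (reg 102) (reg 103)))
                    (set (reg 101) (mod (reg 102) (reg 103)))])
     I3: (set (mem (reg 105)) (reg 101))
   where (reg 101) dies in I3, so the remainder can be computed directly
   into I3's memory destination.  */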
1561 | ||
1562 | if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET | |
1563 | && GET_CODE (SET_SRC (PATTERN (i3))) == REG | |
1564 | && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER | |
f95182a4 | 1565 | && (! SMALL_REGISTER_CLASSES |
e9a25f70 JL |
1566 | || (GET_CODE (SET_DEST (PATTERN (i3))) != REG |
1567 | || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER | |
1568 | || REG_USERVAR_P (SET_DEST (PATTERN (i3))))) | |
230d793d RS |
1569 | && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3))) |
1570 | && GET_CODE (PATTERN (i2)) == PARALLEL | |
1571 | && ! side_effects_p (SET_DEST (PATTERN (i3))) | |
5089e22e RS |
1572 | /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code |
1573 | below would need to check what is inside (and reg_overlap_mentioned_p | |
1574 | doesn't support those codes anyway). Don't allow those destinations; | |
1575 | the resulting insn isn't likely to be recognized anyway. */ | |
1576 | && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT | |
1577 | && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART | |
230d793d RS |
1578 | && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)), |
1579 | SET_DEST (PATTERN (i3))) | |
1580 | && next_real_insn (i2) == i3) | |
5089e22e RS |
1581 | { |
1582 | rtx p2 = PATTERN (i2); | |
1583 | ||
1584 | /* Make sure that the destination of I3, | |
1585 | which we are going to substitute into one output of I2, | |
1586 | is not used within another output of I2. We must avoid making this: | |
1587 | (parallel [(set (mem (reg 69)) ...) | |
1588 | (set (reg 69) ...)]) | |
1589 | which is not well-defined as to order of actions. | |
1590 | (Besides, reload can't handle output reloads for this.) | |
1591 | ||
1592 | The problem can also happen if the dest of I3 is a memory ref, | |
1593 | if another dest in I2 is an indirect memory ref. */ | |
1594 | for (i = 0; i < XVECLEN (p2, 0); i++) | |
7ca919b7 RK |
1595 | if ((GET_CODE (XVECEXP (p2, 0, i)) == SET |
1596 | || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER) | |
5089e22e RS |
1597 | && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)), |
1598 | SET_DEST (XVECEXP (p2, 0, i)))) | |
1599 | break; | |
230d793d | 1600 | |
5089e22e RS |
1601 | if (i == XVECLEN (p2, 0)) |
1602 | for (i = 0; i < XVECLEN (p2, 0); i++) | |
481c7efa FS |
1603 | if ((GET_CODE (XVECEXP (p2, 0, i)) == SET |
1604 | || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER) | |
1605 | && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3))) | |
5089e22e RS |
1606 | { |
1607 | combine_merges++; | |
230d793d | 1608 | |
5089e22e RS |
1609 | subst_insn = i3; |
1610 | subst_low_cuid = INSN_CUID (i2); | |
230d793d | 1611 | |
c4e861e8 | 1612 | added_sets_2 = added_sets_1 = 0; |
5089e22e | 1613 | i2dest = SET_SRC (PATTERN (i3)); |
230d793d | 1614 | |
5089e22e RS |
1615 | /* Replace the dest in I2 with our dest and make the resulting |
1616 | insn the new pattern for I3. Then skip to where we | |
1617 | validate the pattern. Everything was set up above. */ | |
663522cb | 1618 | SUBST (SET_DEST (XVECEXP (p2, 0, i)), |
5089e22e RS |
1619 | SET_DEST (PATTERN (i3))); |
1620 | ||
1621 | newpat = p2; | |
176c9e6b | 1622 | i3_subst_into_i2 = 1; |
5089e22e RS |
1623 | goto validate_replacement; |
1624 | } | |
1625 | } | |
230d793d | 1626 | |
667c1c2c RK |
1627 | /* If I2 is setting a double-word pseudo to a constant and I3 is setting |
1628 | one of those words to another constant, merge them by making a new | |
1629 | constant. */ | |
1630 | if (i1 == 0 | |
1631 | && (temp = single_set (i2)) != 0 | |
1632 | && (GET_CODE (SET_SRC (temp)) == CONST_INT | |
1633 | || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE) | |
1634 | && GET_CODE (SET_DEST (temp)) == REG | |
1635 | && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT | |
1636 | && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD | |
1637 | && GET_CODE (PATTERN (i3)) == SET | |
1638 | && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG | |
1639 | && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp) | |
1640 | && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT | |
1641 | && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD | |
1642 | && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT) | |
1643 | { | |
1644 | HOST_WIDE_INT lo, hi; | |
1645 | ||
1646 | if (GET_CODE (SET_SRC (temp)) == CONST_INT) | |
1647 | lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0; | |
1648 | else | |
1649 | { | |
1650 | lo = CONST_DOUBLE_LOW (SET_SRC (temp)); | |
1651 | hi = CONST_DOUBLE_HIGH (SET_SRC (temp)); | |
1652 | } | |
1653 | ||
1654 | if (subreg_lowpart_p (SET_DEST (PATTERN (i3)))) | |
1655 | lo = INTVAL (SET_SRC (PATTERN (i3))); | |
1656 | else | |
1657 | hi = INTVAL (SET_SRC (PATTERN (i3))); | |
1658 | ||
1659 | combine_merges++; | |
1660 | subst_insn = i3; | |
1661 | subst_low_cuid = INSN_CUID (i2); | |
1662 | added_sets_2 = added_sets_1 = 0; | |
1663 | i2dest = SET_DEST (temp); | |
1664 | ||
1665 | SUBST (SET_SRC (temp), | |
1666 | immed_double_const (lo, hi, GET_MODE (SET_DEST (temp)))); | |
1667 | ||
1668 | newpat = PATTERN (i2); | |
1669 | i3_subst_into_i2 = 1; | |
1670 | goto validate_replacement; | |
1671 | } | |
1672 | ||
230d793d RS |
1673 | #ifndef HAVE_cc0 |
1674 | /* If we have no I1 and I2 looks like: | |
1675 | (parallel [(set (reg:CC X) (compare:CC OP (const_int 0))) | |
1676 | (set Y OP)]) | |
1677 | make up a dummy I1 that is | |
1678 | (set Y OP) | |
1679 | and change I2 to be | |
1680 | (set (reg:CC X) (compare:CC Y (const_int 0))) | |
1681 | ||
1682 | (We can ignore any trailing CLOBBERs.) | |
1683 | ||
1684 | This undoes a previous combination and allows us to match a branch-and- | |
1685 | decrement insn. */ | |
1686 | ||
1687 | if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL | |
1688 | && XVECLEN (PATTERN (i2), 0) >= 2 | |
1689 | && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET | |
1690 | && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)))) | |
1691 | == MODE_CC) | |
1692 | && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE | |
1693 | && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx | |
1694 | && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET | |
1695 | && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG | |
1696 | && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0), | |
1697 | SET_SRC (XVECEXP (PATTERN (i2), 0, 1)))) | |
1698 | { | |
663522cb | 1699 | for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--) |
230d793d RS |
1700 | if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER) |
1701 | break; | |
1702 | ||
1703 | if (i == 1) | |
1704 | { | |
1705 | /* We make I1 with the same INSN_UID as I2. This gives it | |
1706 | the same INSN_CUID for value tracking. Our fake I1 will | |
1707 | never appear in the insn stream so giving it the same INSN_UID | |
1708 | as I2 will not cause a problem. */ | |
1709 | ||
0d9641d1 | 1710 | subst_prev_insn = i1 |
38a448ca RH |
1711 | = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2, |
1712 | XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX, | |
1713 | NULL_RTX); | |
230d793d RS |
1714 | |
1715 | SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0)); | |
1716 | SUBST (XEXP (SET_SRC (PATTERN (i2)), 0), | |
1717 | SET_DEST (PATTERN (i1))); | |
1718 | } | |
1719 | } | |
1720 | #endif | |
1721 | ||
1722 | /* Verify that I2 and I1 are valid for combining. */ | |
5f4f0e22 CH |
1723 | if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src) |
1724 | || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src))) | |
230d793d RS |
1725 | { |
1726 | undo_all (); | |
1727 | return 0; | |
1728 | } | |
1729 | ||
1730 | /* Record whether I2DEST is used in I2SRC and similarly for the other | |
1731 | cases. Knowing this will help in register status updating below. */ | |
1732 | i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src); | |
1733 | i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src); | |
1734 | i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src); | |
1735 | ||
916f14f1 | 1736 | /* See if I1 directly feeds into I3. It does if I1DEST is not used |
230d793d RS |
1737 | in I2SRC. */ |
1738 | i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src); | |
1739 | ||
1740 | /* Ensure that I3's pattern can be the destination of combines. */ | |
1741 | if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest, | |
1742 | i1 && i2dest_in_i1src && i1_feeds_i3, | |
1743 | &i3dest_killed)) | |
1744 | { | |
1745 | undo_all (); | |
1746 | return 0; | |
1747 | } | |
1748 | ||
df7d75de RK |
1749 | /* See if any of the insns is a MULT operation. Unless one is, we will |
1750 | reject a combination that is, since it must be slower. Be conservative | |
1751 | here. */ | |
1752 | if (GET_CODE (i2src) == MULT | |
1753 | || (i1 != 0 && GET_CODE (i1src) == MULT) | |
1754 | || (GET_CODE (PATTERN (i3)) == SET | |
1755 | && GET_CODE (SET_SRC (PATTERN (i3))) == MULT)) | |
1756 | have_mult = 1; | |
1757 | ||
230d793d RS |
1758 | /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd. |
1759 | We used to do this EXCEPT in one case: I3 has a post-inc in an | |
1760 | output operand. However, that exception can give rise to insns like | |
1761 | mov r3,(r3)+ | |
1762 | which is a famous insn on the PDP-11 where the value of r3 used as the | |
5089e22e | 1763 | source was model-dependent. Avoid this sort of thing. */ |
230d793d RS |
1764 | |
1765 | #if 0 | |
1766 | if (!(GET_CODE (PATTERN (i3)) == SET | |
1767 | && GET_CODE (SET_SRC (PATTERN (i3))) == REG | |
1768 | && GET_CODE (SET_DEST (PATTERN (i3))) == MEM | |
1769 | && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC | |
1770 | || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC))) | |
1771 | /* It's not the exception. */ | |
1772 | #endif | |
1773 | #ifdef AUTO_INC_DEC | |
1774 | for (link = REG_NOTES (i3); link; link = XEXP (link, 1)) | |
1775 | if (REG_NOTE_KIND (link) == REG_INC | |
1776 | && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2)) | |
1777 | || (i1 != 0 | |
1778 | && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1))))) | |
1779 | { | |
1780 | undo_all (); | |
1781 | return 0; | |
1782 | } | |
1783 | #endif | |
1784 | ||
1785 | /* See if the SETs in I1 or I2 need to be kept around in the merged | |
1786 | instruction: whenever the value set there is still needed past I3. | |
1787 | For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3. | |
1788 | ||
1789 | For the SET in I1, we have two cases: If I1 and I2 independently | |
1790 | feed into I3, the set in I1 needs to be kept around if I1DEST dies | |
1791 | or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set | |
1792 | in I1 needs to be kept around unless I1DEST dies or is set in either | |
1793 | I2 or I3. We can distinguish these cases by seeing if I2SRC mentions | |
1794 | I1DEST. If so, we know I1 feeds into I2. */ | |
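/* For example, if I2 is (set (reg 100) (plus (reg 101) (reg 102))) and
   some insn after I3 still uses (reg 100), then (reg 100) neither dies
   nor is set in I3, so ADDED_SETS_2 becomes nonzero and I2's SET is
   kept in the merged pattern.  */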
1795 | ||
1796 | added_sets_2 = ! dead_or_set_p (i3, i2dest); | |
1797 | ||
1798 | added_sets_1 | |
1799 | = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest) | |
1800 | : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest))); | |
1801 | ||
1802 | /* If the set in I2 needs to be kept around, we must make a copy of | |
1803 | PATTERN (I2), so that when we substitute I1SRC for I1DEST in | |
5089e22e | 1804 | PATTERN (I2), we are only substituting for the original I1DEST, not into |
230d793d RS |
1805 | an already-substituted copy. This also prevents making self-referential |
1806 | rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to | |
1807 | I2DEST. */ | |
1808 | ||
1809 | i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL | |
38a448ca | 1810 | ? gen_rtx_SET (VOIDmode, i2dest, i2src) |
230d793d RS |
1811 | : PATTERN (i2)); |
1812 | ||
1813 | if (added_sets_2) | |
1814 | i2pat = copy_rtx (i2pat); | |
1815 | ||
1816 | combine_merges++; | |
1817 | ||
1818 | /* Substitute in the latest insn for the regs set by the earlier ones. */ | |
1819 | ||
1820 | maxreg = max_reg_num (); | |
1821 | ||
1822 | subst_insn = i3; | |
230d793d RS |
1823 | |
1824 | /* It is possible that the source of I2 or I1 may be performing an | |
1825 | unneeded operation, such as a ZERO_EXTEND of something that is known | |
1826 | to have the high part zero. Handle that case by letting subst look at | |
1827 | the innermost one of them. | |
1828 | ||
1829 | Another way to do this would be to have a function that tries to | |
1830 | simplify a single insn instead of merging two or more insns. We don't | |
1831 | do this because of the potential of infinite loops and because | |
1832 | of the potential extra memory required. However, doing it the way | |
1833 | we are is a bit of a kludge and doesn't catch all cases. | |
1834 | ||
1835 | But only do this if -fexpensive-optimizations since it slows things down | |
1836 | and doesn't usually win. */ | |
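/* For instance, on a little-endian machine, an I1SRC of
     (zero_extend:DI (subreg:SI (reg:DI 100) 0))
   where nonzero_bits shows that the upper half of (reg:DI 100) is
   already zero can be simplified here before any merging is tried.  */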
1837 | ||
1838 | if (flag_expensive_optimizations) | |
1839 | { | |
1840 | /* Pass pc_rtx so no substitutions are done, just simplifications. | |
1841 | The cases that we are interested in here do not involve the few | |
1842 | cases where is_replaced is checked. */ |
1843 | if (i1) | |
d0ab8cd3 RK |
1844 | { |
1845 | subst_low_cuid = INSN_CUID (i1); | |
1846 | i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0); | |
1847 | } | |
230d793d | 1848 | else |
d0ab8cd3 RK |
1849 | { |
1850 | subst_low_cuid = INSN_CUID (i2); | |
1851 | i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0); | |
1852 | } | |
230d793d | 1853 | |
241cea85 | 1854 | undobuf.previous_undos = undobuf.undos; |
230d793d RS |
1855 | } |
1856 | ||
1857 | #ifndef HAVE_cc0 | |
1858 | /* Many machines that don't use CC0 have insns that can both perform an | |
1859 | arithmetic operation and set the condition code. These operations will | |
1860 | be represented as a PARALLEL with the first element of the vector | |
1861 | being a COMPARE of an arithmetic operation with the constant zero. | |
1862 | The second element of the vector will set some pseudo to the result | |
1863 | of the same arithmetic operation. If we simplify the COMPARE, we won't | |
1864 | match such a pattern and so will generate an extra insn. Here we test | |
1865 | for this case, where both the comparison and the operation result are | |
1866 | needed, and make the PARALLEL by just replacing I2DEST in I3SRC with | |
1867 | I2SRC. Later we will make the PARALLEL that contains I2. */ | |
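/* Such a combined insn would look like (illustrative RTL):
     (parallel [(set (reg:CC 24) (compare:CC (plus (reg 100) (reg 101))
                                             (const_int 0)))
                (set (reg 102) (plus (reg 100) (reg 101)))])
   where both the condition code and the arithmetic result are still
   needed.  */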
1868 | ||
1869 | if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET | |
1870 | && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE | |
1871 | && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx | |
1872 | && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest)) | |
1873 | { | |
081f5e7e | 1874 | #ifdef EXTRA_CC_MODES |
230d793d RS |
1875 | rtx *cc_use; |
1876 | enum machine_mode compare_mode; | |
081f5e7e | 1877 | #endif |
230d793d RS |
1878 | |
1879 | newpat = PATTERN (i3); | |
1880 | SUBST (XEXP (SET_SRC (newpat), 0), i2src); | |
1881 | ||
1882 | i2_is_used = 1; | |
1883 | ||
1884 | #ifdef EXTRA_CC_MODES | |
1885 | /* See if a COMPARE with the operand we substituted in should be done | |
1886 | with the mode that is currently being used. If not, do the same | |
1887 | processing we do in `subst' for a SET; namely, if the destination | |
1888 | is used only once, try to replace it with a register of the proper | |
1889 | mode and also replace the COMPARE. */ | |
1890 | if (undobuf.other_insn == 0 | |
1891 | && (cc_use = find_single_use (SET_DEST (newpat), i3, | |
1892 | &undobuf.other_insn)) | |
77fa0940 RK |
1893 | && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use), |
1894 | i2src, const0_rtx)) | |
230d793d RS |
1895 | != GET_MODE (SET_DEST (newpat)))) |
1896 | { | |
770ae6cc | 1897 | unsigned int regno = REGNO (SET_DEST (newpat)); |
38a448ca | 1898 | rtx new_dest = gen_rtx_REG (compare_mode, regno); |
230d793d RS |
1899 | |
1900 | if (regno < FIRST_PSEUDO_REGISTER | |
b1f21e0a | 1901 | || (REG_N_SETS (regno) == 1 && ! added_sets_2 |
230d793d RS |
1902 | && ! REG_USERVAR_P (SET_DEST (newpat)))) |
1903 | { | |
1904 | if (regno >= FIRST_PSEUDO_REGISTER) | |
1905 | SUBST (regno_reg_rtx[regno], new_dest); | |
1906 | ||
1907 | SUBST (SET_DEST (newpat), new_dest); | |
1908 | SUBST (XEXP (*cc_use, 0), new_dest); | |
1909 | SUBST (SET_SRC (newpat), | |
1910 | gen_rtx_combine (COMPARE, compare_mode, | |
1911 | i2src, const0_rtx)); | |
1912 | } | |
1913 | else | |
1914 | undobuf.other_insn = 0; | |
1915 | } | |
663522cb | 1916 | #endif |
230d793d RS |
1917 | } |
1918 | else | |
1919 | #endif | |
1920 | { | |
1921 | n_occurrences = 0; /* `subst' counts here */ | |
1922 | ||
1923 | /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we | |
1924 | need to make a unique copy of I2SRC each time we substitute it | |
1925 | to avoid self-referential rtl. */ | |
1926 | ||
d0ab8cd3 | 1927 | subst_low_cuid = INSN_CUID (i2); |
230d793d RS |
1928 | newpat = subst (PATTERN (i3), i2dest, i2src, 0, |
1929 | ! i1_feeds_i3 && i1dest_in_i1src); | |
241cea85 | 1930 | undobuf.previous_undos = undobuf.undos; |
230d793d RS |
1931 | |
1932 | /* Record whether i2's body now appears within i3's body. */ | |
1933 | i2_is_used = n_occurrences; | |
1934 | } | |
1935 | ||
1936 | /* If we already got a failure, don't try to do more. Otherwise, | |
1937 | try to substitute in I1 if we have it. */ | |
1938 | ||
1939 | if (i1 && GET_CODE (newpat) != CLOBBER) | |
1940 | { | |
1941 | /* Before we can do this substitution, we must redo the test done | |
1942 | above (see detailed comments there) that ensures that I1DEST | |
0f41302f | 1943 | isn't mentioned in any SETs in NEWPAT that are field assignments. */ |
230d793d | 1944 | |
5f4f0e22 CH |
1945 | if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX, |
1946 | 0, NULL_PTR)) | |
230d793d RS |
1947 | { |
1948 | undo_all (); | |
1949 | return 0; | |
1950 | } | |
1951 | ||
1952 | n_occurrences = 0; | |
d0ab8cd3 | 1953 | subst_low_cuid = INSN_CUID (i1); |
230d793d | 1954 | newpat = subst (newpat, i1dest, i1src, 0, 0); |
241cea85 | 1955 | undobuf.previous_undos = undobuf.undos; |
230d793d RS |
1956 | } |
1957 | ||
916f14f1 RK |
1958 | /* Fail if an autoincrement side-effect has been duplicated. Be careful |
1959 | to count all the ways that I2SRC and I1SRC can be used. */ | |
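/* For example, if I2SRC contains (post_inc (reg 3)) and it now appears
   both substituted into NEWPAT and kept via ADDED_SETS_2, the register
   would be incremented twice, so we must fail.  */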
5f4f0e22 | 1960 | if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0 |
916f14f1 | 1961 | && i2_is_used + added_sets_2 > 1) |
5f4f0e22 | 1962 | || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0 |
916f14f1 RK |
1963 | && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3) |
1964 | > 1)) | |
230d793d RS |
1965 | /* Fail if we tried to make a new register (we used to abort, but there's |
1966 | really no reason to). */ | |
1967 | || max_reg_num () != maxreg | |
1968 | /* Fail if we couldn't do something and have a CLOBBER. */ | |
df7d75de RK |
1969 | || GET_CODE (newpat) == CLOBBER |
1970 | /* Fail if this new pattern is a MULT and we didn't have one before | |
1971 | at the outer level. */ | |
1972 | || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT | |
1973 | && ! have_mult)) | |
230d793d RS |
1974 | { |
1975 | undo_all (); | |
1976 | return 0; | |
1977 | } | |
1978 | ||
1979 | /* If the actions of the earlier insns must be kept | |
1980 | in addition to substituting them into the latest one, | |
1981 | we must make a new PARALLEL for the latest insn | |
1982 | to hold the additional SETs. */ |
1983 | ||
1984 | if (added_sets_1 || added_sets_2) | |
1985 | { | |
1986 | combine_extras++; | |
1987 | ||
1988 | if (GET_CODE (newpat) == PARALLEL) | |
1989 | { | |
1990 | rtvec old = XVEC (newpat, 0); | |
1991 | total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2; | |
38a448ca | 1992 | newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets)); |
59888de2 | 1993 | bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem, |
230d793d RS |
1994 | sizeof (old->elem[0]) * old->num_elem); |
1995 | } | |
1996 | else | |
1997 | { | |
1998 | rtx old = newpat; | |
1999 | total_sets = 1 + added_sets_1 + added_sets_2; | |
38a448ca | 2000 | newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets)); |
230d793d RS |
2001 | XVECEXP (newpat, 0, 0) = old; |
2002 | } | |
2003 | ||
2004 | if (added_sets_1) | |
2005 | XVECEXP (newpat, 0, --total_sets) | |
2006 | = (GET_CODE (PATTERN (i1)) == PARALLEL | |
38a448ca | 2007 | ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1)); |
230d793d RS |
2008 | |
2009 | if (added_sets_2) | |
c5c76735 JL |
2010 | { |
2011 | /* If there is no I1, use I2's body as is. We used to also not do | |
2012 | the subst call below if I2 was substituted into I3, | |
2013 | but that could lose a simplification. */ | |
2014 | if (i1 == 0) | |
2015 | XVECEXP (newpat, 0, --total_sets) = i2pat; | |
2016 | else | |
2017 | /* See comment where i2pat is assigned. */ | |
2018 | XVECEXP (newpat, 0, --total_sets) | |
2019 | = subst (i2pat, i1dest, i1src, 0, 0); | |
2020 | } | |
230d793d RS |
2021 | } |
2022 | ||
2023 | /* We come here when we are replacing a destination in I2 with the | |
2024 | destination of I3. */ | |
2025 | validate_replacement: | |
2026 | ||
6e25d159 RK |
2027 | /* Note which hard regs this insn has as inputs. */ |
2028 | mark_used_regs_combine (newpat); | |
2029 | ||
230d793d | 2030 | /* Is the result of combination a valid instruction? */ |
8e2f6e35 | 2031 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2032 | |
2033 | /* If the result isn't valid, see if it is a PARALLEL of two SETs where | |
2034 | the second SET's destination is a register that is unused. In that case, | |
2035 | we just need the first SET. This can occur when simplifying a divmod | |
2036 | insn. We *must* test for this case here because the code below that | |
2037 | splits two independent SETs doesn't handle this case correctly when it | |
2038 | updates the register status. Also check the case where the first | |
2039 | SET's destination is unused. That would not cause incorrect code, but | |
2040 | does cause an unneeded insn to remain. */ | |
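/* For instance, simplifying a divmod insn can leave
     (parallel [(set (reg 100) (udiv (reg 102) (reg 103)))
                (set (reg 101) (umod (reg 102) (reg 103)))])
   where I3 has a REG_UNUSED note for (reg 101); only the first SET is
   needed, and the plain udiv pattern may then be recognized.  */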
2041 | ||
2042 | if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL | |
2043 | && XVECLEN (newpat, 0) == 2 | |
2044 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2045 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
2046 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG | |
2047 | && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1))) | |
2048 | && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1))) | |
2049 | && asm_noperands (newpat) < 0) | |
2050 | { | |
2051 | newpat = XVECEXP (newpat, 0, 0); | |
8e2f6e35 | 2052 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2053 | } |
2054 | ||
2055 | else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL | |
2056 | && XVECLEN (newpat, 0) == 2 | |
2057 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2058 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
2059 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG | |
2060 | && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0))) | |
2061 | && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0))) | |
2062 | && asm_noperands (newpat) < 0) | |
2063 | { | |
2064 | newpat = XVECEXP (newpat, 0, 1); | |
8e2f6e35 | 2065 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2066 | } |
2067 | ||
2068 | /* If we were combining three insns and the result is a simple SET | |
2069 | with no ASM_OPERANDS that wasn't recognized, try to split it into two | |
663522cb | 2070 | insns. There are two ways to do this. It can be split using a |
916f14f1 RK |
2071 | machine-specific method (like when you have an addition of a large |
2072 | constant) or by combine in the function find_split_point. */ | |
2073 | ||
230d793d RS |
2074 | if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET |
2075 | && asm_noperands (newpat) < 0) | |
2076 | { | |
916f14f1 | 2077 | rtx m_split, *split; |
42495ca0 | 2078 | rtx ni2dest = i2dest; |
916f14f1 RK |
2079 | |
2080 | /* See if the MD file can split NEWPAT. If it can't, see if letting it | |
42495ca0 RK |
2081 | use I2DEST as a scratch register will help. In the latter case, |
2082 | convert I2DEST to the mode of the source of NEWPAT if we can. */ | |
916f14f1 RK |
2083 | |
2084 | m_split = split_insns (newpat, i3); | |
a70c61d9 JW |
2085 | |
2086 | /* We can only use I2DEST as a scratch reg if it doesn't overlap any | |
2087 | inputs of NEWPAT. */ | |
2088 | ||
2089 | /* ??? If I2DEST is not safe, and I1DEST exists, then it would be | |
2090 | possible to try that as a scratch reg. This would require adding | |
2091 | more code to make it work though. */ | |
2092 | ||
2093 | if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat)) | |
42495ca0 RK |
2094 | { |
2095 | /* If I2DEST is a hard register or the only use of a pseudo, | |
2096 | we can change its mode. */ | |
2097 | if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest) | |
02f4ada4 | 2098 | && GET_MODE (SET_DEST (newpat)) != VOIDmode |
60654f77 | 2099 | && GET_CODE (i2dest) == REG |
42495ca0 | 2100 | && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER |
b1f21e0a | 2101 | || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2 |
42495ca0 | 2102 | && ! REG_USERVAR_P (i2dest)))) |
38a448ca | 2103 | ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)), |
c5c76735 JL |
2104 | REGNO (i2dest)); |
2105 | ||
2106 | m_split = split_insns (gen_rtx_PARALLEL | |
2107 | (VOIDmode, | |
2108 | gen_rtvec (2, newpat, | |
2109 | gen_rtx_CLOBBER (VOIDmode, | |
2110 | ni2dest))), | |
2111 | i3); | |
42495ca0 | 2112 | } |
916f14f1 RK |
2113 | |
2114 | if (m_split && GET_CODE (m_split) == SEQUENCE | |
3f508eca RK |
2115 | && XVECLEN (m_split, 0) == 2 |
2116 | && (next_real_insn (i2) == i3 | |
2117 | || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)), | |
2118 | INSN_CUID (i2)))) | |
916f14f1 | 2119 | { |
1a26b032 | 2120 | rtx i2set, i3set; |
d0ab8cd3 | 2121 | rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1)); |
916f14f1 | 2122 | newi2pat = PATTERN (XVECEXP (m_split, 0, 0)); |
916f14f1 | 2123 | |
e4ba89be RK |
2124 | i3set = single_set (XVECEXP (m_split, 0, 1)); |
2125 | i2set = single_set (XVECEXP (m_split, 0, 0)); | |
1a26b032 | 2126 | |
42495ca0 RK |
2127 | /* In case we changed the mode of I2DEST, replace it in the |
2128 | pseudo-register table here. We can't do it above in case this | |
2129 | code doesn't get executed and we do a split the other way. */ | |
2130 | ||
2131 | if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER) | |
2132 | SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest); | |
2133 | ||
8e2f6e35 | 2134 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
1a26b032 RK |
2135 | |
2136 | /* If I2 or I3 has multiple SETs, we won't know how to track | |
9cc96794 RK |
2137 | register status, so don't use these insns. If I2's destination |
2138 | is used between I2 and I3, we also can't use these insns. */ | |
1a26b032 | 2139 | |
9cc96794 RK |
2140 | if (i2_code_number >= 0 && i2set && i3set |
2141 | && (next_real_insn (i2) == i3 | |
2142 | || ! reg_used_between_p (SET_DEST (i2set), i2, i3))) | |
8e2f6e35 BS |
2143 | insn_code_number = recog_for_combine (&newi3pat, i3, |
2144 | &new_i3_notes); | |
d0ab8cd3 RK |
2145 | if (insn_code_number >= 0) |
2146 | newpat = newi3pat; | |
2147 | ||
c767f54b | 2148 | /* It is possible that both insns now set the destination of I3. |
22609cbf | 2149 | If so, we must show an extra use of it. */ |
c767f54b | 2150 | |
393de53f RK |
2151 | if (insn_code_number >= 0) |
2152 | { | |
2153 | rtx new_i3_dest = SET_DEST (i3set); | |
2154 | rtx new_i2_dest = SET_DEST (i2set); | |
2155 | ||
2156 | while (GET_CODE (new_i3_dest) == ZERO_EXTRACT | |
2157 | || GET_CODE (new_i3_dest) == STRICT_LOW_PART | |
2158 | || GET_CODE (new_i3_dest) == SUBREG) | |
2159 | new_i3_dest = XEXP (new_i3_dest, 0); | |
2160 | ||
d4096689 RK |
2161 | while (GET_CODE (new_i2_dest) == ZERO_EXTRACT |
2162 | || GET_CODE (new_i2_dest) == STRICT_LOW_PART | |
2163 | || GET_CODE (new_i2_dest) == SUBREG) | |
2164 | new_i2_dest = XEXP (new_i2_dest, 0); | |
2165 | ||
393de53f RK |
2166 | if (GET_CODE (new_i3_dest) == REG |
2167 | && GET_CODE (new_i2_dest) == REG | |
2168 | && REGNO (new_i3_dest) == REGNO (new_i2_dest)) | |
b1f21e0a | 2169 | REG_N_SETS (REGNO (new_i2_dest))++; |
393de53f | 2170 | } |
916f14f1 | 2171 | } |
230d793d RS |
2172 | |
2173 | /* If we can split it and use I2DEST, go ahead and see if that | |
2174 | helps things be recognized. Verify that none of the registers | |
2175 | are set between I2 and I3. */ | |
d0ab8cd3 | 2176 | if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0 |
230d793d RS |
2177 | #ifdef HAVE_cc0 |
2178 | && GET_CODE (i2dest) == REG | |
2179 | #endif | |
2180 | /* We need I2DEST in the proper mode. If it is a hard register | |
2181 | or the only use of a pseudo, we can change its mode. */ | |
2182 | && (GET_MODE (*split) == GET_MODE (i2dest) | |
2183 | || GET_MODE (*split) == VOIDmode | |
2184 | || REGNO (i2dest) < FIRST_PSEUDO_REGISTER | |
b1f21e0a | 2185 | || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2 |
230d793d RS |
2186 | && ! REG_USERVAR_P (i2dest))) |
2187 | && (next_real_insn (i2) == i3 | |
2188 | || ! use_crosses_set_p (*split, INSN_CUID (i2))) | |
2189 | /* We can't overwrite I2DEST if its value is still used by | |
2190 | NEWPAT. */ | |
2191 | && ! reg_referenced_p (i2dest, newpat)) | |
2192 | { | |
2193 | rtx newdest = i2dest; | |
df7d75de RK |
2194 | enum rtx_code split_code = GET_CODE (*split); |
2195 | enum machine_mode split_mode = GET_MODE (*split); | |
230d793d RS |
2196 | |
2197 | /* Get NEWDEST as a register in the proper mode. We have already | |
2198 | validated that we can do this. */ | |
df7d75de | 2199 | if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode) |
230d793d | 2200 | { |
38a448ca | 2201 | newdest = gen_rtx_REG (split_mode, REGNO (i2dest)); |
230d793d RS |
2202 | |
2203 | if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER) | |
2204 | SUBST (regno_reg_rtx[REGNO (i2dest)], newdest); | |
2205 | } | |
2206 | ||
2207 | /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to | |
2208 | an ASHIFT. This can occur if it was inside a PLUS and hence | |
2209 | appeared to be a memory address. This is a kludge. */ | |
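/* For example, (mult (reg 100) (const_int 8)) becomes
   (ashift (reg 100) (const_int 3)).  */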
df7d75de | 2210 | if (split_code == MULT |
230d793d RS |
2211 | && GET_CODE (XEXP (*split, 1)) == CONST_INT |
2212 | && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0) | |
1dc8a823 JW |
2213 | { |
2214 | SUBST (*split, gen_rtx_combine (ASHIFT, split_mode, | |
2215 | XEXP (*split, 0), GEN_INT (i))); | |
2216 | /* Update split_code because we may not have a multiply | |
2217 | anymore. */ | |
2218 | split_code = GET_CODE (*split); | |
2219 | } | |
230d793d RS |
2220 | |
2221 | #ifdef INSN_SCHEDULING | |
2222 | /* If *SPLIT is a paradoxical SUBREG, when we split it, it should | |
2223 | be written as a ZERO_EXTEND. */ | |
df7d75de RK |
2224 | if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM) |
2225 | SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode, | |
230d793d RS |
2226 | XEXP (*split, 0))); |
2227 | #endif | |
2228 | ||
2229 | newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split); | |
2230 | SUBST (*split, newdest); | |
8e2f6e35 | 2231 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
df7d75de RK |
2232 | |
2233 | /* If the split point was a MULT and we didn't have one before, | |
2234 | don't use one now. */ | |
2235 | if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult)) | |
8e2f6e35 | 2236 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2237 | } |
2238 | } | |
2239 | ||
2240 | /* Check for a case where we loaded from memory in a narrow mode and | |
2241 | then sign extended it, but we need both registers. In that case, | |
2242 | we have a PARALLEL with both loads from the same memory location. | |
2243 | We can split this into a load from memory followed by a register-register | |
2244 | copy. This saves at least one insn, more if register allocation can | |
f0343c74 RK |
2245 | eliminate the copy. |
2246 | ||
2247 | We cannot do this if the destination of the second assignment is | |
2248 | a register that we have already assumed is zero-extended. Similarly | |
2249 | for a SUBREG of such a register. */ | |
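/* Sketch of the transformation (illustrative RTL):
     (parallel [(set (reg:SI 100) (sign_extend:SI (mem:HI (reg 103))))
                (set (reg:HI 101) (mem:HI (reg 103)))])
   becomes NEWI2PAT doing the extending load into (reg:SI 100), while
   NEWPAT copies the low part of (reg:SI 100) into (reg:HI 101).  */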
230d793d RS |
2250 | |
2251 | else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0 | |
2252 | && GET_CODE (newpat) == PARALLEL | |
2253 | && XVECLEN (newpat, 0) == 2 | |
2254 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2255 | && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND | |
2256 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
2257 | && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
2258 | XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0)) | |
2259 | && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
2260 | INSN_CUID (i2)) | |
2261 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT | |
2262 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART | |
f0343c74 RK |
2263 | && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)), |
2264 | (GET_CODE (temp) == REG | |
2265 | && reg_nonzero_bits[REGNO (temp)] != 0 | |
2266 | && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD | |
2267 | && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT | |
2268 | && (reg_nonzero_bits[REGNO (temp)] | |
2269 | != GET_MODE_MASK (word_mode)))) | |
2270 | && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG | |
2271 | && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))), | |
2272 | (GET_CODE (temp) == REG | |
2273 | && reg_nonzero_bits[REGNO (temp)] != 0 | |
2274 | && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD | |
2275 | && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT | |
2276 | && (reg_nonzero_bits[REGNO (temp)] | |
2277 | != GET_MODE_MASK (word_mode))))) | |
230d793d RS |
2278 | && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)), |
2279 | SET_SRC (XVECEXP (newpat, 0, 1))) | |
2280 | && ! find_reg_note (i3, REG_UNUSED, | |
2281 | SET_DEST (XVECEXP (newpat, 0, 0)))) | |
2282 | { | |
472fbdd1 RK |
2283 | rtx ni2dest; |
2284 | ||
230d793d | 2285 | newi2pat = XVECEXP (newpat, 0, 0); |
472fbdd1 | 2286 | ni2dest = SET_DEST (XVECEXP (newpat, 0, 0)); |
230d793d RS |
2287 | newpat = XVECEXP (newpat, 0, 1); |
2288 | SUBST (SET_SRC (newpat), | |
472fbdd1 | 2289 | gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest)); |
8e2f6e35 | 2290 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
a29ca9db | 2291 | |
230d793d | 2292 | if (i2_code_number >= 0) |
8e2f6e35 | 2293 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
5089e22e RS |
2294 | |
2295 | if (insn_code_number >= 0) | |
2296 | { | |
2297 | rtx insn; | |
2298 | rtx link; | |
2299 | ||
2300 | /* If we will be able to accept this, we have made a change to the | |
2301 | destination of I3. This can invalidate a LOG_LINKS entry pointing |
2302 | to I3. No other part of combine.c makes such a transformation. | |
2303 | ||
2304 | The new I3 will have a destination that was previously the | |
2305 | destination of I1 or I2 and which was used in I2 or I3. Call |
2306 | distribute_links to make a LOG_LINK from the next use of | |
2307 | that destination. */ | |
2308 | ||
2309 | PATTERN (i3) = newpat; | |
38a448ca | 2310 | distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX)); |
5089e22e RS |
2311 | |
2312 | /* I3 now uses what used to be its destination and which is | |
2313 | now I2's destination. That means we need a LOG_LINK from | |
2314 | I3 to I2. But we used to have one, so we still will. | |
2315 | ||
2316 | However, some later insn might be using I2's dest and have | |
2317 | a LOG_LINK pointing at I3. We must remove this link. | |
2318 | The simplest way to remove the link is to point it at I1, | |
2319 | which we know will be a NOTE. */ | |
2320 | ||
2321 | for (insn = NEXT_INSN (i3); | |
0d4d42c3 | 2322 | insn && (this_basic_block == n_basic_blocks - 1 |
3b413743 | 2323 | || insn != BLOCK_HEAD (this_basic_block + 1)); |
5089e22e RS |
2324 | insn = NEXT_INSN (insn)) |
2325 | { | |
2326 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i' | |
472fbdd1 | 2327 | && reg_referenced_p (ni2dest, PATTERN (insn))) |
5089e22e RS |
2328 | { |
2329 | for (link = LOG_LINKS (insn); link; | |
2330 | link = XEXP (link, 1)) | |
2331 | if (XEXP (link, 0) == i3) | |
2332 | XEXP (link, 0) = i1; | |
2333 | ||
2334 | break; | |
2335 | } | |
2336 | } | |
2337 | } | |
230d793d | 2338 | } |
663522cb | 2339 | |
230d793d RS |
2340 | /* Similarly, check for a case where we have a PARALLEL of two independent |
2341 | SETs but we started with three insns. In this case, we can do the sets | |
2342 | as two separate insns. This case occurs when some SET allows two | |
2343 | other insns to combine, but the destination of that SET is still live. */ | |
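/* For example, NEWPAT may be
     (parallel [(set (reg 100) (plus (reg 101) (const_int 1)))
                (set (mem (reg 104)) (reg 102))])
   where neither SET references the other's destination; one SET becomes
   NEWI2PAT and the other remains as I3's pattern.  */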
2344 | ||
2345 | else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0 | |
2346 | && GET_CODE (newpat) == PARALLEL | |
2347 | && XVECLEN (newpat, 0) == 2 | |
2348 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2349 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT | |
2350 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART | |
2351 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
2352 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT | |
2353 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART | |
2354 | && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
2355 | INSN_CUID (i2)) | |
2356 | /* Don't pass sets with (USE (MEM ...)) dests to the following. */ | |
2357 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE | |
2358 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE | |
2359 | && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)), | |
2360 | XVECEXP (newpat, 0, 0)) | |
2361 | && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)), | |
14a774a9 RK |
2362 | XVECEXP (newpat, 0, 1)) |
2363 | && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0))) | |
2364 | && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))))) | |
230d793d | 2365 | { |
e9a25f70 JL |
2366 | /* Normally, it doesn't matter which of the two is done first, |
2367 | but it does if one references cc0. In that case, it has to | |
2368 | be first. */ | |
2369 | #ifdef HAVE_cc0 | |
2370 | if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))) | |
2371 | { | |
2372 | newi2pat = XVECEXP (newpat, 0, 0); | |
2373 | newpat = XVECEXP (newpat, 0, 1); | |
2374 | } | |
2375 | else | |
2376 | #endif | |
2377 | { | |
2378 | newi2pat = XVECEXP (newpat, 0, 1); | |
2379 | newpat = XVECEXP (newpat, 0, 0); | |
2380 | } | |
230d793d | 2381 | |
8e2f6e35 | 2382 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
a29ca9db | 2383 | |
230d793d | 2384 | if (i2_code_number >= 0) |
8e2f6e35 | 2385 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2386 | } |
2387 | ||
2388 | /* If it still isn't recognized, fail and change things back the way they | |
2389 | were. */ | |
2390 | if ((insn_code_number < 0 | |
2391 | /* Is the result a reasonable ASM_OPERANDS? */ | |
2392 | && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2))) | |
2393 | { | |
2394 | undo_all (); | |
2395 | return 0; | |
2396 | } | |
2397 | ||
2398 | /* If we had to change another insn, make sure it is valid also. */ | |
2399 | if (undobuf.other_insn) | |
2400 | { | |
230d793d RS |
2401 | rtx other_pat = PATTERN (undobuf.other_insn); |
2402 | rtx new_other_notes; | |
2403 | rtx note, next; | |
2404 | ||
6e25d159 RK |
2405 | CLEAR_HARD_REG_SET (newpat_used_regs); |
2406 | ||
8e2f6e35 BS |
2407 | other_code_number = recog_for_combine (&other_pat, undobuf.other_insn, |
2408 | &new_other_notes); | |
230d793d RS |
2409 | |
2410 | if (other_code_number < 0 && ! check_asm_operands (other_pat)) | |
2411 | { | |
2412 | undo_all (); | |
2413 | return 0; | |
2414 | } | |
2415 | ||
2416 | PATTERN (undobuf.other_insn) = other_pat; | |
2417 | ||
2418 | /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they | |
2419 | are still valid. Then add any non-duplicate notes added by | |
2420 | recog_for_combine. */ | |
2421 | for (note = REG_NOTES (undobuf.other_insn); note; note = next) | |
2422 | { | |
2423 | next = XEXP (note, 1); | |
2424 | ||
2425 | if (REG_NOTE_KIND (note) == REG_UNUSED | |
2426 | && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn))) | |
1a26b032 RK |
2427 | { |
2428 | if (GET_CODE (XEXP (note, 0)) == REG) | |
b1f21e0a | 2429 | REG_N_DEATHS (REGNO (XEXP (note, 0)))--; |
1a26b032 RK |
2430 | |
2431 | remove_note (undobuf.other_insn, note); | |
2432 | } | |
230d793d RS |
2433 | } |
2434 | ||
1a26b032 RK |
2435 | for (note = new_other_notes; note; note = XEXP (note, 1)) |
2436 | if (GET_CODE (XEXP (note, 0)) == REG) | |
b1f21e0a | 2437 | REG_N_DEATHS (REGNO (XEXP (note, 0)))++; |
1a26b032 | 2438 | |
230d793d | 2439 | distribute_notes (new_other_notes, undobuf.other_insn, |
5f4f0e22 | 2440 | undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX); |
230d793d | 2441 | } |
5ef17dd2 | 2442 | #ifdef HAVE_cc0 |
663522cb | 2443 | /* If I2 is the CC0 setter and I3 is the CC0 user, then check whether |
5ef17dd2 CC |
2444 | they are adjacent to each other or not. */ |
2445 | { | |
2446 | rtx p = prev_nonnote_insn (i3); | |
663522cb KH |
2447 | if (p && p != i2 && GET_CODE (p) == INSN && newi2pat |
2448 | && sets_cc0_p (newi2pat)) | |
5ef17dd2 | 2449 | { |
663522cb KH |
2450 | undo_all (); |
2451 | return 0; | |
5ef17dd2 | 2452 | } |
663522cb KH |
2453 | } |
2454 | #endif | |
230d793d | 2455 | |
663522cb | 2456 | /* We now know that we can do this combination. Merge the insns and |
230d793d RS |
2457 | update the status of registers and LOG_LINKS. */ |
2458 | ||
2459 | { | |
2460 | rtx i3notes, i2notes, i1notes = 0; | |
2461 | rtx i3links, i2links, i1links = 0; | |
2462 | rtx midnotes = 0; | |
770ae6cc | 2463 | unsigned int regno; |
ff3467a9 JW |
2464 | /* Compute which registers we expect to eliminate. newi2pat may be setting |
2465 | either i3dest or i2dest, so we must check it. Also, i1dest may be the | |
2466 | same as i3dest, in which case newi2pat may be setting i1dest. */ | |
2467 | rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat)) | |
2468 | || i2dest_in_i2src || i2dest_in_i1src | |
230d793d | 2469 | ? 0 : i2dest); |
ff3467a9 JW |
2470 | rtx elim_i1 = (i1 == 0 || i1dest_in_i1src |
2471 | || (newi2pat && reg_set_p (i1dest, newi2pat)) | |
2472 | ? 0 : i1dest); | |
230d793d RS |
2473 | |
2474 | /* Get the old REG_NOTES and LOG_LINKS from all our insns and | |
2475 | clear them. */ | |
2476 | i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3); | |
2477 | i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2); | |
2478 | if (i1) | |
2479 | i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1); | |
2480 | ||
2481 | /* Ensure that we do not have something that should not be shared but | |
2482 | occurs multiple times in the new insns. Check this by first | |
5089e22e | 2483 | resetting all the `used' flags and then copying anything that is shared. */ |
230d793d RS |
2484 | |
2485 | reset_used_flags (i3notes); | |
2486 | reset_used_flags (i2notes); | |
2487 | reset_used_flags (i1notes); | |
2488 | reset_used_flags (newpat); | |
2489 | reset_used_flags (newi2pat); | |
2490 | if (undobuf.other_insn) | |
2491 | reset_used_flags (PATTERN (undobuf.other_insn)); | |
2492 | ||
2493 | i3notes = copy_rtx_if_shared (i3notes); | |
2494 | i2notes = copy_rtx_if_shared (i2notes); | |
2495 | i1notes = copy_rtx_if_shared (i1notes); | |
2496 | newpat = copy_rtx_if_shared (newpat); | |
2497 | newi2pat = copy_rtx_if_shared (newi2pat); | |
2498 | if (undobuf.other_insn) | |
2499 | PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn)); | |
2500 | ||
2501 | INSN_CODE (i3) = insn_code_number; | |
2502 | PATTERN (i3) = newpat; | |
2503 | if (undobuf.other_insn) | |
2504 | INSN_CODE (undobuf.other_insn) = other_code_number; | |
2505 | ||
2506 | /* We had one special case above where I2 had more than one set and | |
2507 | we replaced a destination of one of those sets with the destination | |
2508 | of I3. In that case, we have to update LOG_LINKS of insns later | |
176c9e6b JW |
2509 | in this basic block. Note that this (expensive) case is rare. |
2510 | ||
2511 | Also, in this case, we must pretend that all REG_NOTEs for I2 | |
2512 | actually came from I3, so that REG_UNUSED notes from I2 will be | |
2513 | properly handled. */ | |
2514 | ||
f85cf636 | 2515 | if (i3_subst_into_i2 && GET_CODE (PATTERN (i2)) == PARALLEL) |
176c9e6b | 2516 | { |
e6770d3c R |
2517 | if (GET_CODE (PATTERN (i2)) == PARALLEL) |
2518 | { | |
2519 | for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++) | |
2520 | if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG | |
2521 | && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest | |
2522 | && ! find_reg_note (i2, REG_UNUSED, | |
2523 | SET_DEST (XVECEXP (PATTERN (i2), 0, i)))) | |
2524 | for (temp = NEXT_INSN (i2); | |
2525 | temp && (this_basic_block == n_basic_blocks - 1 | |
2526 | || BLOCK_HEAD (this_basic_block) != temp); | |
2527 | temp = NEXT_INSN (temp)) | |
2528 | if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i') | |
2529 | for (link = LOG_LINKS (temp); link; link = XEXP (link, 1)) | |
2530 | if (XEXP (link, 0) == i2) | |
2531 | XEXP (link, 0) = i3; | |
2532 | } | |
176c9e6b JW |
2533 | |
2534 | if (i3notes) | |
2535 | { | |
2536 | rtx link = i3notes; | |
2537 | while (XEXP (link, 1)) | |
2538 | link = XEXP (link, 1); | |
2539 | XEXP (link, 1) = i2notes; | |
2540 | } | |
2541 | else | |
2542 | i3notes = i2notes; | |
2543 | i2notes = 0; | |
2544 | } | |
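/* Concretely (hypothetical insns): if I2 is
       (parallel [(set (reg 60) ...) (set (reg 61) ...)])
   and a later insn in this basic block has a LOG_LINKS entry naming I2 as
   the most recent setter of reg 61, that entry is rewritten to name I3,
   which now carries the combined pattern, as described above.  */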
230d793d RS |
2545 | |
2546 | LOG_LINKS (i3) = 0; | |
2547 | REG_NOTES (i3) = 0; | |
2548 | LOG_LINKS (i2) = 0; | |
2549 | REG_NOTES (i2) = 0; | |
2550 | ||
2551 | if (newi2pat) | |
2552 | { | |
2553 | INSN_CODE (i2) = i2_code_number; | |
2554 | PATTERN (i2) = newi2pat; | |
2555 | } | |
2556 | else | |
2557 | { | |
2558 | PUT_CODE (i2, NOTE); | |
2559 | NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED; | |
2560 | NOTE_SOURCE_FILE (i2) = 0; | |
2561 | } | |
2562 | ||
2563 | if (i1) | |
2564 | { | |
2565 | LOG_LINKS (i1) = 0; | |
2566 | REG_NOTES (i1) = 0; | |
2567 | PUT_CODE (i1, NOTE); | |
2568 | NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED; | |
2569 | NOTE_SOURCE_FILE (i1) = 0; | |
2570 | } | |
2571 | ||
2572 | /* Get death notes for everything that is now used in either I3 or | |
663522cb | 2573 | I2 and used to die in a previous insn. If we built two new |
6eb12cef RK |
2574 | patterns, move from I1 to I2 then I2 to I3 so that we get the |
2575 | proper movement on registers that I2 modifies. */ | |
230d793d | 2576 | |
230d793d | 2577 | if (newi2pat) |
6eb12cef RK |
2578 | { |
2579 | move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes); | |
2580 | move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes); | |
2581 | } | |
2582 | else | |
2583 | move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2), | |
2584 | i3, &midnotes); | |
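/* Ordering sketch (hypothetical register): suppose (reg 70) formerly died
   in I1 and is used by newi2pat; the first call moves its REG_DEAD note
   to I2.  The second call then places deaths for newpat on I3, passing
   newi2pat so that a register newi2pat modifies is not given a death
   above I2.  Running the calls in the other order could attach a death
   note to the wrong insn.  */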
230d793d RS |
2585 | |
2586 | /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */ | |
2587 | if (i3notes) | |
5f4f0e22 CH |
2588 | distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX, |
2589 | elim_i2, elim_i1); | |
230d793d | 2590 | if (i2notes) |
5f4f0e22 CH |
2591 | distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX, |
2592 | elim_i2, elim_i1); | |
230d793d | 2593 | if (i1notes) |
5f4f0e22 CH |
2594 | distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX, |
2595 | elim_i2, elim_i1); | |
230d793d | 2596 | if (midnotes) |
5f4f0e22 CH |
2597 | distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
2598 | elim_i2, elim_i1); | |
230d793d RS |
2599 | |
2600 | /* Distribute any notes added to I2 or I3 by recog_for_combine. We | |
2601 | know these are REG_UNUSED and want them to go to the desired insn, | |
663522cb | 2602 | so we always pass it as i3. We have not counted the notes in |
1a26b032 RK |
2603 | reg_n_deaths yet, so we need to do so now. */ |
2604 | ||
230d793d | 2605 | if (newi2pat && new_i2_notes) |
1a26b032 RK |
2606 | { |
2607 | for (temp = new_i2_notes; temp; temp = XEXP (temp, 1)) | |
2608 | if (GET_CODE (XEXP (temp, 0)) == REG) | |
b1f21e0a | 2609 | REG_N_DEATHS (REGNO (XEXP (temp, 0)))++; |
663522cb | 2610 | |
1a26b032 RK |
2611 | distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX); |
2612 | } | |
2613 | ||
230d793d | 2614 | if (new_i3_notes) |
1a26b032 RK |
2615 | { |
2616 | for (temp = new_i3_notes; temp; temp = XEXP (temp, 1)) | |
2617 | if (GET_CODE (XEXP (temp, 0)) == REG) | |
b1f21e0a | 2618 | REG_N_DEATHS (REGNO (XEXP (temp, 0)))++; |
663522cb | 2619 | |
1a26b032 RK |
2620 | distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX); |
2621 | } | |
230d793d RS |
2622 | |
2623 | /* If I3DEST was used in I3SRC, it really died in I3. We may need to | |
e9a25f70 JL |
2624 | put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets |
2625 | I3DEST, the death must be somewhere before I2, not I3. If we passed I3 | |
2626 | in that case, it might delete I2. Similarly for I2 and I1. | |
1a26b032 RK |
2627 | Show an additional death due to the REG_DEAD note we make here. If |
2628 | we discard it in distribute_notes, we will decrement it again. */ | |
d0ab8cd3 | 2629 | |
230d793d | 2630 | if (i3dest_killed) |
1a26b032 RK |
2631 | { |
2632 | if (GET_CODE (i3dest_killed) == REG) | |
b1f21e0a | 2633 | REG_N_DEATHS (REGNO (i3dest_killed))++; |
1a26b032 | 2634 | |
e9a25f70 | 2635 | if (newi2pat && reg_set_p (i3dest_killed, newi2pat)) |
38a448ca RH |
2636 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed, |
2637 | NULL_RTX), | |
ff3467a9 | 2638 | NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1); |
e9a25f70 | 2639 | else |
38a448ca RH |
2640 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed, |
2641 | NULL_RTX), | |
e9a25f70 | 2642 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
ff3467a9 | 2643 | elim_i2, elim_i1); |
1a26b032 | 2644 | } |
58c8c593 | 2645 | |
230d793d | 2646 | if (i2dest_in_i2src) |
58c8c593 | 2647 | { |
1a26b032 | 2648 | if (GET_CODE (i2dest) == REG) |
b1f21e0a | 2649 | REG_N_DEATHS (REGNO (i2dest))++; |
1a26b032 | 2650 | |
58c8c593 | 2651 | if (newi2pat && reg_set_p (i2dest, newi2pat)) |
38a448ca | 2652 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX), |
58c8c593 RK |
2653 | NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX); |
2654 | else | |
38a448ca | 2655 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX), |
58c8c593 RK |
2656 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
2657 | NULL_RTX, NULL_RTX); | |
2658 | } | |
2659 | ||
230d793d | 2660 | if (i1dest_in_i1src) |
58c8c593 | 2661 | { |
1a26b032 | 2662 | if (GET_CODE (i1dest) == REG) |
b1f21e0a | 2663 | REG_N_DEATHS (REGNO (i1dest))++; |
1a26b032 | 2664 | |
58c8c593 | 2665 | if (newi2pat && reg_set_p (i1dest, newi2pat)) |
38a448ca | 2666 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX), |
58c8c593 RK |
2667 | NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX); |
2668 | else | |
38a448ca | 2669 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX), |
58c8c593 RK |
2670 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
2671 | NULL_RTX, NULL_RTX); | |
2672 | } | |
230d793d RS |
2673 | |
2674 | distribute_links (i3links); | |
2675 | distribute_links (i2links); | |
2676 | distribute_links (i1links); | |
2677 | ||
2678 | if (GET_CODE (i2dest) == REG) | |
2679 | { | |
d0ab8cd3 RK |
2680 | rtx link; |
2681 | rtx i2_insn = 0, i2_val = 0, set; | |
2682 | ||
2683 | /* The insn that used to set this register doesn't exist, and | |
2684 | this life of the register may not exist either. See if one of | |
663522cb | 2685 | I3's links points to an insn that sets I2DEST. If it does, |
d0ab8cd3 RK |
2686 | that is now the last known value for I2DEST. If we don't update |
2687 | this and I2 set the register to a value that depended on its old | |
230d793d RS |
2688 | contents, we will get confused. If this insn is used, things |
2689 | will be set correctly in combine_instructions. */ | |
d0ab8cd3 RK |
2690 | |
2691 | for (link = LOG_LINKS (i3); link; link = XEXP (link, 1)) | |
2692 | if ((set = single_set (XEXP (link, 0))) != 0 | |
2693 | && rtx_equal_p (i2dest, SET_DEST (set))) | |
2694 | i2_insn = XEXP (link, 0), i2_val = SET_SRC (set); | |
2695 | ||
2696 | record_value_for_reg (i2dest, i2_insn, i2_val); | |
230d793d RS |
2697 | |
2698 | /* If the reg formerly set in I2 died only once and that was in I3, | |
2699 | decrement its set count so it won't make `reload' do any work. */ |
538fe8cd ILT |
2700 | if (! added_sets_2 |
2701 | && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat)) | |
2702 | && ! i2dest_in_i2src) | |
230d793d RS |
2703 | { |
2704 | regno = REGNO (i2dest); | |
b1f21e0a | 2705 | REG_N_SETS (regno)--; |
230d793d RS |
2706 | } |
2707 | } | |
2708 | ||
2709 | if (i1 && GET_CODE (i1dest) == REG) | |
2710 | { | |
d0ab8cd3 RK |
2711 | rtx link; |
2712 | rtx i1_insn = 0, i1_val = 0, set; | |
2713 | ||
2714 | for (link = LOG_LINKS (i3); link; link = XEXP (link, 1)) | |
2715 | if ((set = single_set (XEXP (link, 0))) != 0 | |
2716 | && rtx_equal_p (i1dest, SET_DEST (set))) | |
2717 | i1_insn = XEXP (link, 0), i1_val = SET_SRC (set); | |
2718 | ||
2719 | record_value_for_reg (i1dest, i1_insn, i1_val); | |
2720 | ||
230d793d | 2721 | regno = REGNO (i1dest); |
5af91171 | 2722 | if (! added_sets_1 && ! i1dest_in_i1src) |
770ae6cc | 2723 | REG_N_SETS (regno)--; |
230d793d RS |
2724 | } |
2725 | ||
951553af | 2726 | /* Update reg_nonzero_bits et al for any changes that may have been made |
663522cb | 2727 | to this insn. The order of set_nonzero_bits_and_sign_copies () is |
5fb7c247 | 2728 | important, because newi2pat can affect the nonzero_bits of newpat. */
22609cbf | 2729 | if (newi2pat) |
84832317 | 2730 | note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL); |
5fb7c247 | 2731 | note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL); |
22609cbf | 2732 | |
44a76fc8 AG |
2733 | /* Set new_direct_jump_p if a new return or simple jump instruction |
2734 | has been created. | |
2735 | ||
663522cb | 2736 | If I3 is now an unconditional jump, ensure that it has a |
230d793d | 2737 | BARRIER following it since it may have initially been a |
381ee8af | 2738 | conditional jump. It may also be the last nonnote insn. */ |
663522cb | 2739 | |
7f1c097d | 2740 | if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3)) |
44a76fc8 AG |
2741 | { |
2742 | *new_direct_jump_p = 1; | |
230d793d | 2743 | |
44a76fc8 AG |
2744 | if ((temp = next_nonnote_insn (i3)) == NULL_RTX |
2745 | || GET_CODE (temp) != BARRIER) | |
2746 | emit_barrier_after (i3); | |
2747 | } | |
230d793d RS |
2748 | } |
2749 | ||
2750 | combine_successes++; | |
e7749837 | 2751 | undo_commit (); |
230d793d | 2752 | |
bcd49eb7 JW |
2753 | /* Clear this here, so that subsequent get_last_value calls are not |
2754 | affected. */ | |
2755 | subst_prev_insn = NULL_RTX; | |
2756 | ||
abe6e52f RK |
2757 | if (added_links_insn |
2758 | && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2)) | |
2759 | && INSN_CUID (added_links_insn) < INSN_CUID (i3)) | |
2760 | return added_links_insn; | |
2761 | else | |
2762 | return newi2pat ? i2 : i3; | |
230d793d RS |
2763 | } |
2764 | \f | |
2765 | /* Undo all the modifications recorded in undobuf. */ | |
2766 | ||
2767 | static void | |
2768 | undo_all () | |
2769 | { | |
241cea85 RK |
2770 | struct undo *undo, *next; |
2771 | ||
2772 | for (undo = undobuf.undos; undo; undo = next) | |
7c046e4e | 2773 | { |
241cea85 RK |
2774 | next = undo->next; |
2775 | if (undo->is_int) | |
2776 | *undo->where.i = undo->old_contents.i; | |
7c046e4e | 2777 | else |
241cea85 RK |
2778 | *undo->where.r = undo->old_contents.r; |
2779 | ||
2780 | undo->next = undobuf.frees; | |
2781 | undobuf.frees = undo; | |
7c046e4e | 2782 | } |
230d793d RS |
2783 | |
2784 | obfree (undobuf.storage); | |
845fc875 | 2785 | undobuf.undos = undobuf.previous_undos = 0; |
bcd49eb7 JW |
2786 | |
2787 | /* Clear this here, so that subsequent get_last_value calls are not | |
2788 | affected. */ | |
2789 | subst_prev_insn = NULL_RTX; | |
230d793d | 2790 | } |
e7749837 RH |
2791 | |
2792 | /* We've committed to accepting the changes we made. Move all | |
2793 | of the undos to the free list. */ | |
2794 | ||
2795 | static void | |
2796 | undo_commit () | |
2797 | { | |
2798 | struct undo *undo, *next; | |
2799 | ||
2800 | for (undo = undobuf.undos; undo; undo = next) | |
2801 | { | |
2802 | next = undo->next; | |
2803 | undo->next = undobuf.frees; | |
2804 | undobuf.frees = undo; | |
2805 | } | |
2806 | undobuf.undos = undobuf.previous_undos = 0; | |
2807 | } | |
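/* In outline, the undo machinery that undo_all and undo_commit walk is a
   simple journal.  A simplified model (the names do_SUBST and
   get_undo_record are illustrative, not the exact declarations used in
   this file):

       struct undo { struct undo *next; int is_int;
                     union { rtx r; int i; } old_contents;
                     union { rtx *r; int *i; } where; };

       void do_SUBST (rtx *into, rtx newval)
       {
         struct undo *buf = get_undo_record ();  // reuse undobuf.frees
         buf->is_int = 0;
         buf->where.r = into;
         buf->old_contents.r = *into;            // remember old value
         *into = newval;                         // make the change
         buf->next = undobuf.undos;
         undobuf.undos = buf;                    // push onto the journal
       }

   undo_all replays the journal, restoring *where from old_contents;
   undo_commit just moves every record to the free list.  */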
2808 | ||
230d793d RS |
2809 | \f |
2810 | /* Find the innermost point within the rtx at LOC, possibly LOC itself, | |
d0ab8cd3 RK |
2811 | where we have an arithmetic expression and return that point. LOC will |
2812 | be inside INSN. | |
230d793d RS |
2813 | |
2814 | try_combine will call this function to see if an insn can be split into | |
2815 | two insns. */ | |
2816 | ||
2817 | static rtx * | |
d0ab8cd3 | 2818 | find_split_point (loc, insn) |
230d793d | 2819 | rtx *loc; |
d0ab8cd3 | 2820 | rtx insn; |
230d793d RS |
2821 | { |
2822 | rtx x = *loc; | |
2823 | enum rtx_code code = GET_CODE (x); | |
2824 | rtx *split; | |
770ae6cc RK |
2825 | unsigned HOST_WIDE_INT len = 0; |
2826 | HOST_WIDE_INT pos = 0; | |
2827 | int unsignedp = 0; | |
6a651371 | 2828 | rtx inner = NULL_RTX; |
230d793d RS |
2829 | |
2830 | /* First special-case some codes. */ | |
2831 | switch (code) | |
2832 | { | |
2833 | case SUBREG: | |
2834 | #ifdef INSN_SCHEDULING | |
2835 | /* If we are making a paradoxical SUBREG invalid, it becomes a split | |
2836 | point. */ | |
2837 | if (GET_CODE (SUBREG_REG (x)) == MEM) | |
2838 | return loc; | |
2839 | #endif | |
d0ab8cd3 | 2840 | return find_split_point (&SUBREG_REG (x), insn); |
230d793d | 2841 | |
230d793d | 2842 | case MEM: |
916f14f1 | 2843 | #ifdef HAVE_lo_sum |
230d793d RS |
2844 | /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it |
2845 | using LO_SUM and HIGH. */ | |
2846 | if (GET_CODE (XEXP (x, 0)) == CONST | |
2847 | || GET_CODE (XEXP (x, 0)) == SYMBOL_REF) | |
2848 | { | |
2849 | SUBST (XEXP (x, 0), | |
2850 | gen_rtx_combine (LO_SUM, Pmode, | |
2851 | gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)), | |
2852 | XEXP (x, 0))); | |
2853 | return &XEXP (XEXP (x, 0), 0); | |
2854 | } | |
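/* For example (hypothetical RTL): (mem:SI (symbol_ref:SI ("x"))) is
   rewritten as
       (mem:SI (lo_sum:SI (high:SI (symbol_ref:SI ("x")))
                          (symbol_ref:SI ("x"))))
   and the HIGH subexpression is returned as the split point, so the high
   part of the address can be computed by a separate insn.  */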
230d793d RS |
2855 | #endif |
2856 | ||
916f14f1 RK |
2857 | /* If we have a PLUS whose second operand is a constant and the |
2858 | address is not valid, perhaps we can split it up using |
2859 | the machine-specific way to split large constants. We use | |
ddd5a7c1 | 2860 | the first pseudo-reg (one of the virtual regs) as a placeholder; |
916f14f1 RK |
2861 | it will not remain in the result. */ |
2862 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
2863 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
2864 | && ! memory_address_p (GET_MODE (x), XEXP (x, 0))) | |
2865 | { | |
2866 | rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER]; | |
38a448ca | 2867 | rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)), |
916f14f1 RK |
2868 | subst_insn); |
2869 | ||
2870 | /* This should have produced two insns, each of which sets our | |
2871 | placeholder. If the source of the second is a valid address, | |
2872 | we can put both sources together and make a split point | |
2873 | in the middle. */ | |
2874 | ||
2875 | if (seq && XVECLEN (seq, 0) == 2 | |
2876 | && GET_CODE (XVECEXP (seq, 0, 0)) == INSN | |
2877 | && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET | |
2878 | && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg | |
2879 | && ! reg_mentioned_p (reg, | |
2880 | SET_SRC (PATTERN (XVECEXP (seq, 0, 0)))) | |
2881 | && GET_CODE (XVECEXP (seq, 0, 1)) == INSN | |
2882 | && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET | |
2883 | && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg | |
2884 | && memory_address_p (GET_MODE (x), | |
2885 | SET_SRC (PATTERN (XVECEXP (seq, 0, 1))))) | |
2886 | { | |
2887 | rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0))); | |
2888 | rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1))); | |
2889 | ||
2890 | /* Replace the placeholder in SRC2 with SRC1. If we can | |
2891 | find where in SRC2 it was placed, that can become our | |
2892 | split point and we can replace this address with SRC2. | |
2893 | Just try two obvious places. */ | |
2894 | ||
2895 | src2 = replace_rtx (src2, reg, src1); | |
2896 | split = 0; | |
2897 | if (XEXP (src2, 0) == src1) | |
2898 | split = &XEXP (src2, 0); | |
2899 | else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e' | |
2900 | && XEXP (XEXP (src2, 0), 0) == src1) | |
2901 | split = &XEXP (XEXP (src2, 0), 0); | |
2902 | ||
2903 | if (split) | |
2904 | { | |
2905 | SUBST (XEXP (x, 0), src2); | |
2906 | return split; | |
2907 | } | |
2908 | } | |
663522cb | 2909 | |
1a26b032 RK |
2910 | /* If that didn't work, perhaps the first operand is complex and |
2911 | needs to be computed separately, so make a split point there. | |
2912 | This will occur on machines that just support REG + CONST | |
2913 | and have a constant moved through some previous computation. */ | |
2914 | ||
2915 | else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o' | |
2916 | && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG | |
2917 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0)))) | |
2918 | == 'o'))) | |
2919 | return &XEXP (XEXP (x, 0), 0); | |
916f14f1 RK |
2920 | } |
2921 | break; | |
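/* Sketch (hypothetical target and constants): for an invalid address
       (plus (reg:SI 100) (const_int 0x12345))
   the target's splitter might emit, via the placeholder pseudo P,
       (set P (plus (reg 100) (const_int 0x12000)))
       (set P (plus P (const_int 0x345)))
   Substituting the first source back into the second yields
       (plus (plus (reg 100) (const_int 0x12000)) (const_int 0x345))
   and the inner PLUS becomes the split point.  */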
2922 | ||
230d793d RS |
2923 | case SET: |
2924 | #ifdef HAVE_cc0 | |
2925 | /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a | |
2926 | ZERO_EXTRACT, the most likely reason why this doesn't match is that | |
2927 | we need to put the operand into a register. So split at that | |
2928 | point. */ | |
2929 | ||
2930 | if (SET_DEST (x) == cc0_rtx | |
2931 | && GET_CODE (SET_SRC (x)) != COMPARE | |
2932 | && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT | |
2933 | && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o' | |
2934 | && ! (GET_CODE (SET_SRC (x)) == SUBREG | |
2935 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o')) | |
2936 | return &SET_SRC (x); | |
2937 | #endif | |
2938 | ||
2939 | /* See if we can split SET_SRC as it stands. */ | |
d0ab8cd3 | 2940 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
2941 | if (split && split != &SET_SRC (x)) |
2942 | return split; | |
2943 | ||
041d7180 JL |
2944 | /* See if we can split SET_DEST as it stands. */ |
2945 | split = find_split_point (&SET_DEST (x), insn); | |
2946 | if (split && split != &SET_DEST (x)) | |
2947 | return split; | |
2948 | ||
230d793d RS |
2949 | /* See if this is a bitfield assignment with everything constant. If |
2950 | so, this is an IOR of an AND, so split it into that. */ | |
2951 | if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT | |
2952 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))) | |
5f4f0e22 | 2953 | <= HOST_BITS_PER_WIDE_INT) |
230d793d RS |
2954 | && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT |
2955 | && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT | |
2956 | && GET_CODE (SET_SRC (x)) == CONST_INT | |
2957 | && ((INTVAL (XEXP (SET_DEST (x), 1)) | |
2958 | + INTVAL (XEXP (SET_DEST (x), 2))) | |
2959 | <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))) | |
2960 | && ! side_effects_p (XEXP (SET_DEST (x), 0))) | |
2961 | { | |
770ae6cc RK |
2962 | HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2)); |
2963 | unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1)); | |
2964 | unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x)); | |
230d793d RS |
2965 | rtx dest = XEXP (SET_DEST (x), 0); |
2966 | enum machine_mode mode = GET_MODE (dest); | |
5f4f0e22 | 2967 | unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1; |
230d793d | 2968 | |
f76b9db2 ILT |
2969 | if (BITS_BIG_ENDIAN) |
2970 | pos = GET_MODE_BITSIZE (mode) - len - pos; | |
230d793d | 2971 | |
770ae6cc | 2972 | if (src == mask) |
230d793d | 2973 | SUBST (SET_SRC (x), |
5f4f0e22 | 2974 | gen_binary (IOR, mode, dest, GEN_INT (src << pos))); |
230d793d RS |
2975 | else |
2976 | SUBST (SET_SRC (x), | |
2977 | gen_binary (IOR, mode, | |
663522cb KH |
2978 | gen_binary (AND, mode, dest, |
2979 | GEN_INT (~(mask << pos) | |
5f4f0e22 CH |
2980 | & GET_MODE_MASK (mode))), |
2981 | GEN_INT (src << pos))); | |
230d793d RS |
2982 | |
2983 | SUBST (SET_DEST (x), dest); | |
2984 | ||
d0ab8cd3 | 2985 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
2986 | if (split && split != &SET_SRC (x)) |
2987 | return split; | |
2988 | } | |
2989 | ||
2990 | /* Otherwise, see if this is an operation that we can split into two. | |
2991 | If so, try to split that. */ | |
2992 | code = GET_CODE (SET_SRC (x)); | |
2993 | ||
2994 | switch (code) | |
2995 | { | |
d0ab8cd3 RK |
2996 | case AND: |
2997 | /* If we are AND'ing with a large constant that is only a single | |
2998 | bit and the result is only being used in a context where we | |
2999 | need to know if it is zero or non-zero, replace it with a bit | |
3000 | extraction. This will avoid the large constant, which might | |
3001 | have taken more than one insn to make. If the constant were | |
3002 | not a valid argument to the AND but took only one insn to make, | |
3003 | this is no worse, but if it took more than one insn, it will | |
3004 | be better. */ | |
3005 | ||
3006 | if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT | |
3007 | && GET_CODE (XEXP (SET_SRC (x), 0)) == REG | |
3008 | && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7 | |
3009 | && GET_CODE (SET_DEST (x)) == REG | |
3010 | && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0 | |
3011 | && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE) | |
3012 | && XEXP (*split, 0) == SET_DEST (x) | |
3013 | && XEXP (*split, 1) == const0_rtx) | |
3014 | { | |
76184def DE |
3015 | rtx extraction = make_extraction (GET_MODE (SET_DEST (x)), |
3016 | XEXP (SET_SRC (x), 0), | |
3017 | pos, NULL_RTX, 1, 1, 0, 0); | |
3018 | if (extraction != 0) | |
3019 | { | |
3020 | SUBST (SET_SRC (x), extraction); | |
3021 | return find_split_point (loc, insn); | |
3022 | } | |
d0ab8cd3 RK |
3023 | } |
3024 | break; | |
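/* Worked example (hypothetical pseudos): for
       (set (reg 80) (and:SI (reg 81) (const_int 4096)))
   whose only use of reg 80 is (ne (reg 80) (const_int 0)), pos =
   exact_log2 (4096) = 12 >= 7, so the AND is replaced by a one-bit
   extraction of reg 81 at bit 12 (a ZERO_EXTRACT or an equivalent
   shift form), avoiding the large constant.  */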
3025 | ||
1a6ec070 RK |
3026 | case NE: |
3027 | /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X |
3028 | is known to be on, this can be converted into a NEG of a shift. */ | |
3029 | if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx | |
3030 | && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0)) | |
4eb2cb10 | 3031 | && 1 <= (pos = exact_log2 |
1a6ec070 RK |
3032 | (nonzero_bits (XEXP (SET_SRC (x), 0), |
3033 | GET_MODE (XEXP (SET_SRC (x), 0)))))) | |
3034 | { | |
3035 | enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0)); | |
3036 | ||
3037 | SUBST (SET_SRC (x), | |
3038 | gen_rtx_combine (NEG, mode, | |
3039 | gen_rtx_combine (LSHIFTRT, mode, | |
3040 | XEXP (SET_SRC (x), 0), | |
4eb2cb10 | 3041 | GEN_INT (pos)))); |
1a6ec070 RK |
3042 | |
3043 | split = find_split_point (&SET_SRC (x), insn); | |
3044 | if (split && split != &SET_SRC (x)) | |
3045 | return split; | |
3046 | } | |
3047 | break; | |
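/* Worked example: with STORE_FLAG_VALUE == -1, if nonzero_bits shows that
   only bit 31 of (reg:SI 90) can be set, then
       (ne:SI (reg:SI 90) (const_int 0))
   becomes
       (neg:SI (lshiftrt:SI (reg:SI 90) (const_int 31)));
   the shift leaves 0 or 1, and the negation yields 0 or -1.  */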
3048 | ||
230d793d RS |
3049 | case SIGN_EXTEND: |
3050 | inner = XEXP (SET_SRC (x), 0); | |
101c1a3d JL |
3051 | |
3052 | /* We can't optimize if either mode is a partial integer | |
3053 | mode as we don't know how many bits are significant | |
3054 | in those modes. */ | |
3055 | if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT | |
3056 | || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT) | |
3057 | break; | |
3058 | ||
230d793d RS |
3059 | pos = 0; |
3060 | len = GET_MODE_BITSIZE (GET_MODE (inner)); | |
3061 | unsignedp = 0; | |
3062 | break; | |
3063 | ||
3064 | case SIGN_EXTRACT: | |
3065 | case ZERO_EXTRACT: | |
3066 | if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT | |
3067 | && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT) | |
3068 | { | |
3069 | inner = XEXP (SET_SRC (x), 0); | |
3070 | len = INTVAL (XEXP (SET_SRC (x), 1)); | |
3071 | pos = INTVAL (XEXP (SET_SRC (x), 2)); | |
3072 | ||
f76b9db2 ILT |
3073 | if (BITS_BIG_ENDIAN) |
3074 | pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos; | |
230d793d RS |
3075 | unsignedp = (code == ZERO_EXTRACT); |
3076 | } | |
3077 | break; | |
e9a25f70 JL |
3078 | |
3079 | default: | |
3080 | break; | |
230d793d RS |
3081 | } |
3082 | ||
3083 | if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner))) | |
3084 | { | |
3085 | enum machine_mode mode = GET_MODE (SET_SRC (x)); | |
3086 | ||
d0ab8cd3 RK |
3087 | /* For unsigned, we have a choice of a shift followed by an |
3088 | AND or two shifts. Use two shifts for field sizes where the | |
3089 | constant might be too large. We assume here that we can | |
3090 | always at least get 8-bit constants in an AND insn, which is | |
3091 | true for every current RISC. */ | |
3092 | ||
3093 | if (unsignedp && len <= 8) | |
230d793d RS |
3094 | { |
3095 | SUBST (SET_SRC (x), | |
3096 | gen_rtx_combine | |
3097 | (AND, mode, | |
3098 | gen_rtx_combine (LSHIFTRT, mode, | |
3099 | gen_lowpart_for_combine (mode, inner), | |
5f4f0e22 CH |
3100 | GEN_INT (pos)), |
3101 | GEN_INT (((HOST_WIDE_INT) 1 << len) - 1))); | |
230d793d | 3102 | |
d0ab8cd3 | 3103 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
3104 | if (split && split != &SET_SRC (x)) |
3105 | return split; | |
3106 | } | |
3107 | else | |
3108 | { | |
3109 | SUBST (SET_SRC (x), | |
3110 | gen_rtx_combine | |
d0ab8cd3 | 3111 | (unsignedp ? LSHIFTRT : ASHIFTRT, mode, |
230d793d RS |
3112 | gen_rtx_combine (ASHIFT, mode, |
3113 | gen_lowpart_for_combine (mode, inner), | |
5f4f0e22 CH |
3114 | GEN_INT (GET_MODE_BITSIZE (mode) |
3115 | - len - pos)), | |
3116 | GEN_INT (GET_MODE_BITSIZE (mode) - len))); | |
230d793d | 3117 | |
d0ab8cd3 | 3118 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
3119 | if (split && split != &SET_SRC (x)) |
3120 | return split; | |
3121 | } | |
3122 | } | |
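/* Worked example: an unsigned 4-bit field at bit 8 of (reg:SI 95) can be
   written either as
       (and:SI (lshiftrt:SI (reg:SI 95) (const_int 8)) (const_int 15))
   or, with two shifts, as
       (lshiftrt:SI (ashift:SI (reg:SI 95) (const_int 20)) (const_int 28)).
   Here len = 4 <= 8, so the AND form is chosen; a signed extract always
   uses the ASHIFT/ASHIFTRT pair.  */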
3123 | ||
3124 | /* See if this is a simple operation with a constant as the second | |
3125 | operand. It might be that this constant is out of range and hence | |
3126 | could be used as a split point. */ | |
3127 | if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2' | |
3128 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c' | |
3129 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<') | |
3130 | && CONSTANT_P (XEXP (SET_SRC (x), 1)) | |
3131 | && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o' | |
3132 | || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG | |
3133 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0)))) | |
3134 | == 'o')))) | |
3135 | return &XEXP (SET_SRC (x), 1); | |
3136 | ||
3137 | /* Finally, see if this is a simple operation with its first operand | |
3138 | not in a register. The operation might require this operand in a | |
3139 | register, so return it as a split point. We can always do this | |
3140 | because if the first operand were another operation, we would have | |
3141 | already found it as a split point. */ | |
3142 | if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2' | |
3143 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c' | |
3144 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<' | |
3145 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1') | |
3146 | && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode)) | |
3147 | return &XEXP (SET_SRC (x), 0); | |
3148 | ||
3149 | return 0; | |
3150 | ||
3151 | case AND: | |
3152 | case IOR: | |
3153 | /* We write NOR as (and (not A) (not B)), but if we don't have a NOR, | |
3154 | it is better to write this as (not (ior A B)) so we can split it. | |
3155 | Similarly for IOR. */ | |
3156 | if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT) | |
3157 | { | |
3158 | SUBST (*loc, | |
3159 | gen_rtx_combine (NOT, GET_MODE (x), | |
3160 | gen_rtx_combine (code == IOR ? AND : IOR, | |
3161 | GET_MODE (x), | |
3162 | XEXP (XEXP (x, 0), 0), | |
3163 | XEXP (XEXP (x, 1), 0)))); | |
d0ab8cd3 | 3164 | return find_split_point (loc, insn); |
230d793d RS |
3165 | } |
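/* E.g. (and (not A) (not B)) becomes (not (ior A B)); by De Morgan's law
   the two are equal, and the inner IOR gives find_split_point a simpler
   shape to recurse into (or lets the target match a NOR insn).  */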
3166 | ||
3167 | /* Many RISC machines have a large set of logical insns. If the | |
3168 | second operand is a NOT, put it first so we will try to split the | |
3169 | other operand first. */ | |
3170 | if (GET_CODE (XEXP (x, 1)) == NOT) | |
3171 | { | |
3172 | rtx tem = XEXP (x, 0); | |
3173 | SUBST (XEXP (x, 0), XEXP (x, 1)); | |
3174 | SUBST (XEXP (x, 1), tem); | |
3175 | } | |
3176 | break; | |
e9a25f70 JL |
3177 | |
3178 | default: | |
3179 | break; | |
230d793d RS |
3180 | } |
3181 | ||
3182 | /* Otherwise, select our actions depending on our rtx class. */ | |
3183 | switch (GET_RTX_CLASS (code)) | |
3184 | { | |
3185 | case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */ | |
3186 | case '3': | |
d0ab8cd3 | 3187 | split = find_split_point (&XEXP (x, 2), insn); |
230d793d RS |
3188 | if (split) |
3189 | return split; | |
0f41302f | 3190 | /* ... fall through ... */ |
230d793d RS |
3191 | case '2': |
3192 | case 'c': | |
3193 | case '<': | |
d0ab8cd3 | 3194 | split = find_split_point (&XEXP (x, 1), insn); |
230d793d RS |
3195 | if (split) |
3196 | return split; | |
0f41302f | 3197 | /* ... fall through ... */ |
230d793d RS |
3198 | case '1': |
3199 | /* Some machines have (and (shift ...) ...) insns. If X is not | |
3200 | an AND, but XEXP (X, 0) is, use it as our split point. */ | |
3201 | if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND) | |
3202 | return &XEXP (x, 0); | |
3203 | ||
d0ab8cd3 | 3204 | split = find_split_point (&XEXP (x, 0), insn); |
230d793d RS |
3205 | if (split) |
3206 | return split; | |
3207 | return loc; | |
3208 | } | |
3209 | ||
3210 | /* Otherwise, we don't have a split point. */ | |
3211 | return 0; | |
3212 | } | |
3213 | \f | |
3214 | /* Throughout X, replace FROM with TO, and return the result. | |
3215 | The result is TO if X is FROM; | |
3216 | otherwise the result is X, but its contents may have been modified. | |
3217 | If they were modified, a record was made in undobuf so that | |
3218 | undo_all will (among other things) return X to its original state. | |
3219 | ||
3220 | If the number of changes necessary is too much to record to undo, | |
3221 | the excess changes are not made, so the result is invalid. | |
3222 | The changes already made can still be undone. | |
3223 | undobuf.num_undo is incremented for such changes, so by testing that, |
3224 | the caller can tell whether the result is valid. | |
3225 | ||
3226 | `n_occurrences' is incremented each time FROM is replaced. | |
663522cb | 3227 | |
230d793d RS |
3228 | IN_DEST is non-zero if we are processing the SET_DEST of a SET. |
3229 | ||
5089e22e | 3230 | UNIQUE_COPY is non-zero if each substitution must be unique. We do this |
230d793d RS |
3231 | by copying if `n_occurrences' is non-zero. */ |
3232 | ||
3233 | static rtx | |
3234 | subst (x, from, to, in_dest, unique_copy) | |
3235 | register rtx x, from, to; | |
3236 | int in_dest; | |
3237 | int unique_copy; | |
3238 | { | |
f24ad0e4 | 3239 | register enum rtx_code code = GET_CODE (x); |
230d793d | 3240 | enum machine_mode op0_mode = VOIDmode; |
6f7d635c | 3241 | register const char *fmt; |
8079805d RK |
3242 | register int len, i; |
3243 | rtx new; | |
230d793d RS |
3244 | |
3245 | /* Two expressions are equal if they are identical copies of a shared | |
3246 | RTX or if they are both registers with the same register number | |
3247 | and mode. */ | |
3248 | ||
3249 | #define COMBINE_RTX_EQUAL_P(X,Y) \ | |
3250 | ((X) == (Y) \ | |
3251 | || (GET_CODE (X) == REG && GET_CODE (Y) == REG \ | |
3252 | && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y))) | |
3253 | ||
3254 | if (! in_dest && COMBINE_RTX_EQUAL_P (x, from)) | |
3255 | { | |
3256 | n_occurrences++; | |
3257 | return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to); | |
3258 | } | |
3259 | ||
3260 | /* If X and FROM are the same register but different modes, they will | |
663522cb | 3261 | not have been seen as equal above. However, flow.c will make a |
230d793d RS |
3262 | LOG_LINKS entry for that case. If we do nothing, we will try to |
3263 | rerecognize our original insn and, when it succeeds, we will | |
3264 | delete the feeding insn, which is incorrect. | |
3265 | ||
3266 | So force this insn not to match in this (rare) case. */ | |
3267 | if (! in_dest && code == REG && GET_CODE (from) == REG | |
3268 | && REGNO (x) == REGNO (from)) | |
38a448ca | 3269 | return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
230d793d RS |
3270 | |
3271 | /* If this is an object, we are done unless it is a MEM or LO_SUM, both | |
3272 | of which may contain things that can be combined. */ | |
3273 | if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o') | |
3274 | return x; | |
3275 | ||
3276 | /* It is possible to have a subexpression appear twice in the insn. | |
3277 | Suppose that FROM is a register that appears within TO. | |
3278 | Then, after that subexpression has been scanned once by `subst', | |
3279 | the second time it is scanned, TO may be found. If we were | |
3280 | to scan TO here, we would find FROM within it and create a | |
3281 | self-referent rtl structure which is completely wrong. */ | |
3282 | if (COMBINE_RTX_EQUAL_P (x, to)) | |
3283 | return to; | |
3284 | ||
4f4b3679 RH |
3285 | /* Parallel asm_operands need special attention because all of the |
3286 | inputs are shared across the arms. Furthermore, unsharing the | |
3287 | rtl results in recognition failures. Failure to handle this case | |
3288 | specially can result in circular rtl. | |
3289 | ||
3290 | Solve this by doing a normal pass across the first entry of the | |
3291 | parallel, and only processing the SET_DESTs of the subsequent | |
3292 | entries. Ug. */ | |
3293 | ||
3294 | if (code == PARALLEL | |
3295 | && GET_CODE (XVECEXP (x, 0, 0)) == SET | |
3296 | && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS) | |
230d793d | 3297 | { |
4f4b3679 RH |
3298 | new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy); |
3299 | ||
3300 | /* If this substitution failed, this whole thing fails. */ | |
3301 | if (GET_CODE (new) == CLOBBER | |
3302 | && XEXP (new, 0) == const0_rtx) | |
3303 | return new; | |
3304 | ||
3305 | SUBST (XVECEXP (x, 0, 0), new); | |
3306 | ||
3307 | for (i = XVECLEN (x, 0) - 1; i >= 1; i--) | |
230d793d | 3308 | { |
4f4b3679 | 3309 | rtx dest = SET_DEST (XVECEXP (x, 0, i)); |
663522cb | 3310 | |
4f4b3679 RH |
3311 | if (GET_CODE (dest) != REG |
3312 | && GET_CODE (dest) != CC0 | |
3313 | && GET_CODE (dest) != PC) | |
230d793d | 3314 | { |
4f4b3679 | 3315 | new = subst (dest, from, to, 0, unique_copy); |
230d793d | 3316 | |
4f4b3679 RH |
3317 | /* If this substitution failed, this whole thing fails. */ |
3318 | if (GET_CODE (new) == CLOBBER | |
3319 | && XEXP (new, 0) == const0_rtx) | |
3320 | return new; | |
230d793d | 3321 | |
4f4b3679 | 3322 | SUBST (SET_DEST (XVECEXP (x, 0, i)), new); |
230d793d RS |
3323 | } |
3324 | } | |
4f4b3679 RH |
3325 | } |
3326 | else | |
3327 | { | |
3328 | len = GET_RTX_LENGTH (code); | |
3329 | fmt = GET_RTX_FORMAT (code); | |
3330 | ||
3331 | /* We don't need to process a SET_DEST that is a register, CC0, | |
3332 | or PC, so set up to skip this common case. All other cases | |
3333 | where we want to suppress replacing something inside a | |
3334 | SET_SRC are handled via the IN_DEST operand. */ | |
3335 | if (code == SET | |
3336 | && (GET_CODE (SET_DEST (x)) == REG | |
3337 | || GET_CODE (SET_DEST (x)) == CC0 | |
3338 | || GET_CODE (SET_DEST (x)) == PC)) | |
3339 | fmt = "ie"; | |
3340 | ||
3341 | /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a | |
3342 | constant. */ | |
3343 | if (fmt[0] == 'e') | |
3344 | op0_mode = GET_MODE (XEXP (x, 0)); | |
3345 | ||
3346 | for (i = 0; i < len; i++) | |
230d793d | 3347 | { |
4f4b3679 | 3348 | if (fmt[i] == 'E') |
230d793d | 3349 | { |
4f4b3679 RH |
3350 | register int j; |
3351 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
3352 | { | |
3353 | if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from)) | |
3354 | { | |
3355 | new = (unique_copy && n_occurrences | |
3356 | ? copy_rtx (to) : to); | |
3357 | n_occurrences++; | |
3358 | } | |
3359 | else | |
3360 | { | |
3361 | new = subst (XVECEXP (x, i, j), from, to, 0, | |
3362 | unique_copy); | |
3363 | ||
3364 | /* If this substitution failed, this whole thing | |
3365 | fails. */ | |
3366 | if (GET_CODE (new) == CLOBBER | |
3367 | && XEXP (new, 0) == const0_rtx) | |
3368 | return new; | |
3369 | } | |
3370 | ||
3371 | SUBST (XVECEXP (x, i, j), new); | |
3372 | } | |
3373 | } | |
3374 | else if (fmt[i] == 'e') | |
3375 | { | |
3376 | if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from)) | |
3377 | { | |
3378 | /* In general, don't install a subreg involving two | |
3379 | modes that are not tieable. It can worsen register |
3380 | allocation, and can even make invalid reload | |
3381 | insns, since the reg inside may need to be copied | |
3382 | from in the outside mode, and that may be invalid | |
3383 | if it is an fp reg copied in integer mode. | |
3384 | ||
3385 | We allow two exceptions to this: It is valid if | |
3386 | it is inside another SUBREG and the mode of that | |
3387 | SUBREG and the mode of the inside of TO is | |
3388 | tieable and it is valid if X is a SET that copies | |
3389 | FROM to CC0. */ | |
3390 | ||
3391 | if (GET_CODE (to) == SUBREG | |
3392 | && ! MODES_TIEABLE_P (GET_MODE (to), | |
3393 | GET_MODE (SUBREG_REG (to))) | |
3394 | && ! (code == SUBREG | |
3395 | && MODES_TIEABLE_P (GET_MODE (x), | |
3396 | GET_MODE (SUBREG_REG (to)))) | |
42301240 | 3397 | #ifdef HAVE_cc0 |
4f4b3679 | 3398 | && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx) |
42301240 | 3399 | #endif |
4f4b3679 RH |
3400 | ) |
3401 | return gen_rtx_CLOBBER (VOIDmode, const0_rtx); | |
42301240 | 3402 | |
02188693 | 3403 | #ifdef CLASS_CANNOT_CHANGE_MODE |
ed8afe3a GK |
3404 | if (code == SUBREG |
3405 | && GET_CODE (to) == REG | |
3406 | && REGNO (to) < FIRST_PSEUDO_REGISTER | |
3407 | && (TEST_HARD_REG_BIT | |
02188693 | 3408 | (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE], |
ed8afe3a | 3409 | REGNO (to))) |
02188693 RH |
3410 | && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (to), |
3411 | GET_MODE (x))) | |
ed8afe3a GK |
3412 | return gen_rtx_CLOBBER (VOIDmode, const0_rtx); |
3413 | #endif | |
3414 | ||
4f4b3679 RH |
3415 | new = (unique_copy && n_occurrences ? copy_rtx (to) : to); |
3416 | n_occurrences++; | |
3417 | } | |
3418 | else | |
3419 | /* If we are in a SET_DEST, suppress most cases unless we | |
3420 | have gone inside a MEM, in which case we want to | |
3421 | simplify the address. We assume here that things that | |
3422 | are actually part of the destination have their inner | |
663522cb | 3423 | parts in the first expression. This is true for SUBREG, |
4f4b3679 RH |
3424 | STRICT_LOW_PART, and ZERO_EXTRACT, which are the only |
3425 | things aside from REG and MEM that should appear in a | |
3426 | SET_DEST. */ | |
3427 | new = subst (XEXP (x, i), from, to, | |
3428 | (((in_dest | |
3429 | && (code == SUBREG || code == STRICT_LOW_PART | |
3430 | || code == ZERO_EXTRACT)) | |
3431 | || code == SET) | |
3432 | && i == 0), unique_copy); | |
3433 | ||
3434 | /* If we found that we will have to reject this combination, | |
3435 | indicate that by returning the CLOBBER ourselves, rather than | |
3436 | an expression containing it. This will speed things up as | |
3437 | well as prevent accidents where two CLOBBERs are considered | |
3438 | to be equal, thus producing an incorrect simplification. */ | |
3439 | ||
3440 | if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx) | |
3441 | return new; | |
3442 | ||
3443 | SUBST (XEXP (x, i), new); | |
230d793d | 3444 | } |
230d793d RS |
3445 | } |
3446 | } | |
3447 | ||
8079805d RK |
3448 | /* Try to simplify X. If the simplification changed the code, it is likely |
3449 | that further simplification will help, so loop, but limit the number | |
3450 | of repetitions that will be performed. */ | |
3451 | ||
3452 | for (i = 0; i < 4; i++) | |
3453 | { | |
3454 | /* If X is sufficiently simple, don't bother trying to do anything | |
3455 | with it. */ | |
3456 | if (code != CONST_INT && code != REG && code != CLOBBER) | |
31ec4e5e | 3457 | x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest); |
d0ab8cd3 | 3458 | |
8079805d RK |
3459 | if (GET_CODE (x) == code) |
3460 | break; | |
d0ab8cd3 | 3461 | |
8079805d | 3462 | code = GET_CODE (x); |
eeb43d32 | 3463 | |
8079805d RK |
3464 | /* We no longer know the original mode of operand 0 since we |
3465 | have changed the form of X. */
3466 | op0_mode = VOIDmode; | |
3467 | } | |
eeb43d32 | 3468 | |
8079805d RK |
3469 | return x; |
3470 | } | |
3471 | \f | |
3472 | /* Simplify X, a piece of RTL. We just operate on the expression at the | |
3473 | outer level; call `subst' to simplify recursively. Return the new | |
3474 | expression. | |
3475 | ||
3476 | OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this | |
3477 | will be the last iteration even if an expression with a code different from |
3478 | X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */ | |
eeb43d32 | 3479 | |
8079805d | 3480 | static rtx |
31ec4e5e | 3481 | combine_simplify_rtx (x, op0_mode, last, in_dest) |
8079805d RK |
3482 | rtx x; |
3483 | enum machine_mode op0_mode; | |
3484 | int last; | |
3485 | int in_dest; | |
3486 | { | |
3487 | enum rtx_code code = GET_CODE (x); | |
3488 | enum machine_mode mode = GET_MODE (x); | |
3489 | rtx temp; | |
3490 | int i; | |
d0ab8cd3 | 3491 | |
230d793d RS |
3492 | /* If this is a commutative operation, put a constant last and a complex |
3493 | expression first. We don't need to do this for comparisons here. */ | |
3494 | if (GET_RTX_CLASS (code) == 'c' | |
3495 | && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT) | |
3496 | || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o' | |
3497 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o') | |
3498 | || (GET_CODE (XEXP (x, 0)) == SUBREG | |
3499 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o' | |
3500 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'))) | |
3501 | { | |
3502 | temp = XEXP (x, 0); | |
3503 | SUBST (XEXP (x, 0), XEXP (x, 1)); | |
3504 | SUBST (XEXP (x, 1), temp); | |
3505 | } | |
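/* E.g. (plus (const_int 4) (reg:SI 60)) is canonicalized to
   (plus (reg:SI 60) (const_int 4)) here, so later code can assume any
   constant operand of a commutative operation is the second one.  */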
3506 | ||
22609cbf RK |
3507 | /* If this is a PLUS, MINUS, or MULT, and the first operand is the |
3508 | sign extension of a PLUS with a constant, reverse the order of the sign | |
3509 | extension and the addition. Note that this not the same as the original | |
3510 | code, but overflow is undefined for signed values. Also note that the | |
3511 | PLUS will have been partially moved "inside" the sign-extension, so that | |
3512 | the first operand of X will really look like: | |
3513 | (ashiftrt (plus (ashift A C4) C5) C4). | |
3514 | We convert this to | |
3515 | (plus (ashiftrt (ashift A C4) C2) C4) | |
3516 | and replace the first operand of X with that expression. Later parts | |
3517 | of this function may simplify the expression further. | |
3518 | ||
3519 | For example, if we start with (mult (sign_extend (plus A C1)) C2), | |
3520 | we swap the SIGN_EXTEND and PLUS. Later code will apply the | |
3521 | distributive law to produce (plus (mult (sign_extend X) C1) C3). | |
3522 | ||
3523 | We do this to simplify address expressions. */ | |
3524 | ||
3525 | if ((code == PLUS || code == MINUS || code == MULT) | |
3526 | && GET_CODE (XEXP (x, 0)) == ASHIFTRT | |
3527 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS | |
3528 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT | |
3529 | && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT | |
3530 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3531 | && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1) | |
3532 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT | |
3533 | && (temp = simplify_binary_operation (ASHIFTRT, mode, | |
3534 | XEXP (XEXP (XEXP (x, 0), 0), 1), | |
3535 | XEXP (XEXP (x, 0), 1))) != 0) | |
3536 | { | |
3537 | rtx new | |
3538 | = simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
3539 | XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0), | |
3540 | INTVAL (XEXP (XEXP (x, 0), 1))); | |
3541 | ||
3542 | new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new, | |
3543 | INTVAL (XEXP (XEXP (x, 0), 1))); | |
3544 | ||
3545 | SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp)); | |
3546 | } | |
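/* Numeric sketch (illustrative constants): with C4 = 24, sign-extending a
   QImode value in SImode, and C5 = (5 << 24), the first operand
       (ashiftrt (plus (ashift A 24) (5 << 24)) 24)
   is rewritten as
       (plus (ashiftrt (ashift A 24) 24) 5),
   i.e. the sign extension of A plus 5; the two agree except on overflow,
   which is undefined for signed arithmetic anyway.  */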
3547 | ||
663522cb | 3548 | /* If this is a simple operation applied to an IF_THEN_ELSE, try |
d0ab8cd3 | 3549 | applying it to the arms of the IF_THEN_ELSE. This often simplifies |
abe6e52f RK |
3550 | things. Check for cases where both arms are testing the same |
3551 | condition. | |
3552 | ||
3553 | Don't do anything if all operands are very simple. */ | |
3554 | ||
3555 | if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c' | |
3556 | || GET_RTX_CLASS (code) == '<') | |
3557 | && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o' | |
3558 | && ! (GET_CODE (XEXP (x, 0)) == SUBREG | |
3559 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) | |
3560 | == 'o'))) | |
3561 | || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o' | |
3562 | && ! (GET_CODE (XEXP (x, 1)) == SUBREG | |
3563 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1)))) | |
3564 | == 'o'))))) | |
3565 | || (GET_RTX_CLASS (code) == '1' | |
3566 | && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o' | |
3567 | && ! (GET_CODE (XEXP (x, 0)) == SUBREG | |
3568 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) | |
3569 | == 'o')))))) | |
d0ab8cd3 | 3570 | { |
abe6e52f RK |
3571 | rtx cond, true, false; |
3572 | ||
3573 | cond = if_then_else_cond (x, &true, &false); | |
0802d516 RK |
3574 | if (cond != 0 |
3575 | /* If everything is a comparison, what we have is highly unlikely | |
3576 | to be simpler, so don't use it. */ | |
3577 | && ! (GET_RTX_CLASS (code) == '<' | |
3578 | && (GET_RTX_CLASS (GET_CODE (true)) == '<' | |
3579 | || GET_RTX_CLASS (GET_CODE (false)) == '<'))) | |
abe6e52f RK |
3580 | { |
3581 | rtx cop1 = const0_rtx; | |
3582 | enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1); | |
3583 | ||
15448afc RK |
3584 | if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<') |
3585 | return x; | |
3586 | ||
663522cb | 3587 | /* Simplify the alternative arms; this may collapse the true and |
9210df58 RK |
3588 | false arms to store-flag values. */ |
3589 | true = subst (true, pc_rtx, pc_rtx, 0, 0); | |
3590 | false = subst (false, pc_rtx, pc_rtx, 0, 0); | |
3591 | ||
085f1714 RH |
3592 | /* If true and false are not general_operands, an if_then_else |
3593 | is unlikely to be simpler. */ | |
3594 | if (general_operand (true, VOIDmode) | |
3595 | && general_operand (false, VOIDmode)) | |
3596 | { | |
3597 | /* Restarting if we generate a store-flag expression will cause | |
3598 | us to loop. Just drop through in this case. */ | |
3599 | ||
3600 | /* If the result values are STORE_FLAG_VALUE and zero, we can | |
3601 | just make the comparison operation. */ | |
3602 | if (true == const_true_rtx && false == const0_rtx) | |
3603 | x = gen_binary (cond_code, mode, cond, cop1); | |
3604 | else if (true == const0_rtx && false == const_true_rtx) | |
3605 | x = gen_binary (reverse_condition (cond_code), | |
3606 | mode, cond, cop1); | |
3607 | ||
3608 | /* Likewise, we can make the negate of a comparison operation | |
3609 | if the result values are - STORE_FLAG_VALUE and zero. */ | |
3610 | else if (GET_CODE (true) == CONST_INT | |
3611 | && INTVAL (true) == - STORE_FLAG_VALUE | |
3612 | && false == const0_rtx) | |
3613 | x = gen_unary (NEG, mode, mode, | |
3614 | gen_binary (cond_code, mode, cond, cop1)); | |
3615 | else if (GET_CODE (false) == CONST_INT | |
3616 | && INTVAL (false) == - STORE_FLAG_VALUE | |
3617 | && true == const0_rtx) | |
3618 | x = gen_unary (NEG, mode, mode, | |
663522cb | 3619 | gen_binary (reverse_condition (cond_code), |
085f1714 RH |
3620 | mode, cond, cop1)); |
3621 | else | |
3622 | return gen_rtx_IF_THEN_ELSE (mode, | |
3623 | gen_binary (cond_code, VOIDmode, | |
3624 | cond, cop1), | |
3625 | true, false); | |
5109d49f | 3626 | |
085f1714 RH |
3627 | code = GET_CODE (x); |
3628 | op0_mode = VOIDmode; | |
3629 | } | |
abe6e52f | 3630 | } |
d0ab8cd3 RK |
3631 | } |
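/* For instance, with STORE_FLAG_VALUE == 1, an x whose arms under
   condition (ne A (const_int 0)) are (const_int 1) and (const_int 0)
   collapses to the store-flag expression (ne A (const_int 0)) itself;
   swapped arms produce the reversed comparison (eq A (const_int 0)).  */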
3632 | ||
230d793d RS |
3633 | /* Try to fold this expression in case we have constants that weren't |
3634 | present before. */ | |
3635 | temp = 0; | |
3636 | switch (GET_RTX_CLASS (code)) | |
3637 | { | |
3638 | case '1': | |
3639 | temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode); | |
3640 | break; | |
3641 | case '<': | |
47b1e19b JH |
3642 | { |
3643 | enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0)); | |
3644 | if (cmp_mode == VOIDmode) | |
3645 | cmp_mode = GET_MODE (XEXP (x, 1)); | |
3646 | temp = simplify_relational_operation (code, cmp_mode, | |
3647 | XEXP (x, 0), XEXP (x, 1)); | |
3648 | } | |
77fa0940 | 3649 | #ifdef FLOAT_STORE_FLAG_VALUE |
12530dbe RH |
3650 | if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT) |
3651 | { | |
3652 | if (temp == const0_rtx) | |
3653 | temp = CONST0_RTX (mode); | |
3654 | else | |
3655 | temp = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE (mode), mode); | |
3656 | } | |
77fa0940 | 3657 | #endif |
230d793d RS |
3658 | break; |
3659 | case 'c': | |
3660 | case '2': | |
3661 | temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1)); | |
3662 | break; | |
3663 | case 'b': | |
3664 | case '3': | |
3665 | temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0), | |
3666 | XEXP (x, 1), XEXP (x, 2)); | |
3667 | break; | |
3668 | } | |
3669 | ||
3670 | if (temp) | |
d0ab8cd3 | 3671 | x = temp, code = GET_CODE (temp); |
230d793d | 3672 | |
230d793d | 3673 | /* First see if we can apply the inverse distributive law. */ |
224eeff2 RK |
3674 | if (code == PLUS || code == MINUS |
3675 | || code == AND || code == IOR || code == XOR) | |
230d793d RS |
3676 | { |
3677 | x = apply_distributive_law (x); | |
3678 | code = GET_CODE (x); | |
3679 | } | |
3680 | ||
3681 | /* If CODE is an associative operation not otherwise handled, see if we | |
3682 | can associate some operands. This can win if they are constants or | |
3683 | if they are logically related (i.e. (a & b) & a). */
3684 | if ((code == PLUS || code == MINUS | |
3685 | || code == MULT || code == AND || code == IOR || code == XOR | |
3686 | || code == DIV || code == UDIV | |
3687 | || code == SMAX || code == SMIN || code == UMAX || code == UMIN) | |
3ad2180a | 3688 | && INTEGRAL_MODE_P (mode)) |
230d793d RS |
3689 | { |
3690 | if (GET_CODE (XEXP (x, 0)) == code) | |
3691 | { | |
3692 | rtx other = XEXP (XEXP (x, 0), 0); | |
3693 | rtx inner_op0 = XEXP (XEXP (x, 0), 1); | |
3694 | rtx inner_op1 = XEXP (x, 1); | |
3695 | rtx inner; | |
663522cb | 3696 | |
230d793d RS |
3697 | /* Make sure we pass the constant operand if any as the second |
3698 | one if this is a commutative operation. */ | |
3699 | if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c') | |
3700 | { | |
3701 | rtx tem = inner_op0; | |
3702 | inner_op0 = inner_op1; | |
3703 | inner_op1 = tem; | |
3704 | } | |
3705 | inner = simplify_binary_operation (code == MINUS ? PLUS | |
3706 | : code == DIV ? MULT | |
3707 | : code == UDIV ? MULT | |
3708 | : code, | |
3709 | mode, inner_op0, inner_op1); | |
3710 | ||
3711 | /* For commutative operations, try the other pair if that one | |
3712 | didn't simplify. */ | |
3713 | if (inner == 0 && GET_RTX_CLASS (code) == 'c') | |
3714 | { | |
3715 | other = XEXP (XEXP (x, 0), 1); | |
3716 | inner = simplify_binary_operation (code, mode, | |
3717 | XEXP (XEXP (x, 0), 0), | |
3718 | XEXP (x, 1)); | |
3719 | } | |
3720 | ||
3721 | if (inner) | |
8079805d | 3722 | return gen_binary (code, mode, other, inner); |
230d793d RS |
3723 | } |
3724 | } | |
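/* E.g. (plus (plus A (const_int 4)) (const_int 8)) reassociates to
   (plus A (const_int 12)).  For the (a & b) & a case above, the first
   pairing (b & a) does not fold, but the commutative retry pairs the two
   a's; (a & a) simplifies to a, and the result is (and b a).  */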
3725 | ||
3726 | /* A little bit of algebraic simplification here. */ | |
3727 | switch (code) | |
3728 | { | |
3729 | case MEM: | |
3730 | /* Ensure that our address has any ASHIFTs converted to MULT in case | |
3731 | address-recognizing predicates are called later. */ | |
3732 | temp = make_compound_operation (XEXP (x, 0), MEM); | |
3733 | SUBST (XEXP (x, 0), temp); | |
3734 | break; | |
3735 | ||
3736 | case SUBREG: | |
3737 | /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG | |
3738 | is paradoxical. If we can't do that safely, then it becomes | |
3739 | something nonsensical so that this combination won't take place. */ | |
3740 | ||
3741 | if (GET_CODE (SUBREG_REG (x)) == MEM | |
3742 | && (GET_MODE_SIZE (mode) | |
3743 | <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))) | |
3744 | { | |
3745 | rtx inner = SUBREG_REG (x); | |
3746 | int endian_offset = 0; | |
3747 | /* Don't change the mode of the MEM | |
3748 | if that would change the meaning of the address. */ | |
3749 | if (MEM_VOLATILE_P (SUBREG_REG (x)) | |
3750 | || mode_dependent_address_p (XEXP (inner, 0))) | |
38a448ca | 3751 | return gen_rtx_CLOBBER (mode, const0_rtx); |
230d793d | 3752 | |
f76b9db2 ILT |
3753 | if (BYTES_BIG_ENDIAN) |
3754 | { | |
3755 | if (GET_MODE_SIZE (mode) < UNITS_PER_WORD) | |
3756 | endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode); | |
3757 | if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD) | |
3758 | endian_offset -= (UNITS_PER_WORD | |
3759 | - GET_MODE_SIZE (GET_MODE (inner))); | |
3760 | } | |
230d793d RS |
3761 | /* Note if the plus_constant doesn't make a valid address |
3762 | then this combination won't be accepted. */ | |
38a448ca RH |
3763 | x = gen_rtx_MEM (mode, |
3764 | plus_constant (XEXP (inner, 0), | |
3765 | (SUBREG_WORD (x) * UNITS_PER_WORD | |
3766 | + endian_offset))); | |
c6df88cb | 3767 | MEM_COPY_ATTRIBUTES (x, inner); |
230d793d RS |
3768 | return x; |
3769 | } | |
3770 | ||
3771 | /* If we are in a SET_DEST, these other cases can't apply. */ | |
3772 | if (in_dest) | |
3773 | return x; | |
3774 | ||
3775 | /* Changing mode twice with SUBREG => just change it once, | |
3776 | or not at all if changing back to starting mode. */ | |
3777 | if (GET_CODE (SUBREG_REG (x)) == SUBREG) | |
3778 | { | |
3779 | if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x))) | |
3780 | && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0) | |
3781 | return SUBREG_REG (SUBREG_REG (x)); | |
3782 | ||
3783 | SUBST_INT (SUBREG_WORD (x), | |
3784 | SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x))); | |
3785 | SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x))); | |
3786 | } | |
3787 | ||
3788 | /* SUBREG of a hard register => just change the register number | |
3789 | and/or mode. If the hard register is not valid in that mode, | |
26ecfc76 RK |
3790 | suppress this combination. If the hard register is the stack, |
3791 | frame, or argument pointer, leave this as a SUBREG. */ | |
230d793d RS |
3792 | |
3793 | if (GET_CODE (SUBREG_REG (x)) == REG | |
26ecfc76 RK |
3794 | && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER |
3795 | && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM | |
6d7096b0 DE |
3796 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
3797 | && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM | |
3798 | #endif | |
26ecfc76 RK |
3799 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM |
3800 | && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM | |
3801 | #endif | |
3802 | && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM) | |
230d793d RS |
3803 | { |
3804 | if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x), | |
3805 | mode)) | |
38a448ca RH |
3806 | return gen_rtx_REG (mode, |
3807 | REGNO (SUBREG_REG (x)) + SUBREG_WORD (x)); | |
230d793d | 3808 | else |
38a448ca | 3809 | return gen_rtx_CLOBBER (mode, const0_rtx); |
230d793d RS |
3810 | } |
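/* E.g. (subreg:HI (reg:SI 3) 0) becomes (reg:HI 3) when hard register 3
   is valid in HImode; if HARD_REGNO_MODE_OK refuses, the combination is
   poisoned with (clobber (const_int 0)) instead.  */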
3811 | ||
3812 | /* For a constant, try to pick up the part we want. Handle a full | |
a4bde0b1 RK |
3813 | word and low-order part. Only do this if we are narrowing |
3814 | the constant; if it is being widened, we have no idea what | |
3815 | the extra bits will have been set to. */ | |
230d793d RS |
3816 | |
3817 | if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode | |
3818 | && GET_MODE_SIZE (mode) == UNITS_PER_WORD | |
3c99d5ff | 3819 | && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD |
230d793d RS |
3820 | && GET_MODE_CLASS (mode) == MODE_INT) |
3821 | { | |
3822 | temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x), | |
5f4f0e22 | 3823 | 0, op0_mode); |
230d793d RS |
3824 | if (temp) |
3825 | return temp; | |
3826 | } | |
663522cb | 3827 | |
19808e22 RS |
3828 | /* If we want a subreg of a constant, at offset 0, |
3829 | take the low bits. On a little-endian machine, that's | |
3830 | always valid. On a big-endian machine, it's valid | |
3c99d5ff | 3831 | only if the constant's mode fits in one word. Note that we |
61b1bece | 3832 | cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode. */ |
3c99d5ff RK |
3833 | if (CONSTANT_P (SUBREG_REG (x)) |
3834 | && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD | |
3835 | || ! WORDS_BIG_ENDIAN) | |
3836 | ? SUBREG_WORD (x) == 0 | |
3837 | : (SUBREG_WORD (x) | |
3838 | == ((GET_MODE_SIZE (op0_mode) | |
3839 | - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD)) | |
3840 | / UNITS_PER_WORD))) | |
f82da7d2 | 3841 | && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode) |
f76b9db2 ILT |
3842 | && (! WORDS_BIG_ENDIAN |
3843 | || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD)) | |
230d793d RS |
3844 | return gen_lowpart_for_combine (mode, SUBREG_REG (x)); |
3845 | ||
b65c1b5b RK |
3846 | /* A paradoxical SUBREG of a VOIDmode constant is the same constant, |
3847 | since we are saying that the high bits don't matter. */ | |
3848 | if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode | |
3849 | && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode)) | |
54f3b5c2 R |
3850 | { |
3851 | if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD | |
3852 | && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0)) | |
3853 | return operand_subword (SUBREG_REG (x), SUBREG_WORD (x), 0, mode); | |
3854 | return SUBREG_REG (x); | |
3855 | } | |
b65c1b5b | 3856 | |
87e3e0c1 RK |
3857 | /* Note that we cannot do any narrowing for non-constants since |
3858 | we might have been counting on using the fact that some bits were | |
3859 | zero. We now do this in the SET. */ | |
3860 | ||
230d793d RS |
3861 | break; |
3862 | ||
3863 | case NOT: | |
3864 | /* (not (plus X -1)) can become (neg X). */ | |
3865 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
3866 | && XEXP (XEXP (x, 0), 1) == constm1_rtx) | |
8079805d | 3867 | return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0)); |
230d793d RS |
3868 | |
3869 | /* Similarly, (not (neg X)) is (plus X -1). */ | |
3870 | if (GET_CODE (XEXP (x, 0)) == NEG) | |
8079805d RK |
3871 | return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), |
3872 | constm1_rtx); | |
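/* Both rewrites are instances of the two's complement identity
   ~X == -X - 1.  Numerically (illustrative): for X == 5,
   (not (plus 5 -1)) == ~4 == -5, and (not (neg 5)) == ~(-5) == 4.  */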
230d793d | 3873 | |
663522cb | 3874 | /* (not (xor X C)) for C constant is (xor X D) with D = ~C. */ |
d0ab8cd3 RK |
3875 | if (GET_CODE (XEXP (x, 0)) == XOR |
3876 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3877 | && (temp = simplify_unary_operation (NOT, mode, | |
3878 | XEXP (XEXP (x, 0), 1), | |
3879 | mode)) != 0) | |
787745f5 | 3880 | return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp); |
663522cb | 3881 | |
230d793d RS |
3882 | /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands |
3883 | other than 1, but that is not valid. We could do a similar | |
3884 | simplification for (not (lshiftrt C X)) where C is just the sign bit, | |
3885 | but this doesn't seem common enough to bother with. */ | |
3886 | if (GET_CODE (XEXP (x, 0)) == ASHIFT | |
3887 | && XEXP (XEXP (x, 0), 0) == const1_rtx) | |
38a448ca RH |
3888 | return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx), |
3889 | XEXP (XEXP (x, 0), 1)); | |
663522cb | 3890 | |
230d793d RS |
3891 | if (GET_CODE (XEXP (x, 0)) == SUBREG |
3892 | && subreg_lowpart_p (XEXP (x, 0)) | |
3893 | && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) | |
3894 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0))))) | |
3895 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT | |
3896 | && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx) | |
3897 | { | |
3898 | enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0))); | |
3899 | ||
38a448ca RH |
3900 | x = gen_rtx_ROTATE (inner_mode, |
3901 | gen_unary (NOT, inner_mode, inner_mode, | |
3902 | const1_rtx), | |
3903 | XEXP (SUBREG_REG (XEXP (x, 0)), 1)); | |
8079805d | 3904 | return gen_lowpart_for_combine (mode, x); |
230d793d | 3905 | } |
663522cb | 3906 | |
0802d516 RK |
3907 | /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by |
3908 | reversing the comparison code if valid. */ | |
3909 | if (STORE_FLAG_VALUE == -1 | |
3910 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<' | |
230d793d RS |
3911 | && reversible_comparison_p (XEXP (x, 0))) |
3912 | return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))), | |
3913 | mode, XEXP (XEXP (x, 0), 0), | |
3914 | XEXP (XEXP (x, 0), 1)); | |
500c518b RK |
3915 | |
3916 | /* (ashiftrt foo C) where C is the number of bits in FOO minus 1 | |
0802d516 RK |
3917 | is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can |
3918 | perform the above simplification. */ | |
500c518b | 3919 | |
0802d516 | 3920 | if (STORE_FLAG_VALUE == -1 |
500c518b | 3921 | && GET_CODE (XEXP (x, 0)) == ASHIFTRT |
37ac53d9 | 3922 | && XEXP (x, 1) == const1_rtx |
500c518b RK |
3923 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT |
3924 | && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1) | |
3925 | return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx); | |
230d793d RS |
3926 | |
3927 | /* Apply De Morgan's laws to reduce number of patterns for machines | |
3928 | with negating logical insns (and-not, nand, etc.). If result has | |
3929 | only one NOT, put it first, since that is how the patterns are | |
3930 | coded. */ | |
3931 | ||
3932 | if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND) | |
3933 | { | |
663522cb | 3934 | rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1); |
230d793d | 3935 | |
663522cb KH |
3936 | if (GET_CODE (in1) == NOT) |
3937 | in1 = XEXP (in1, 0); | |
3938 | else | |
3939 | in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1); | |
230d793d | 3940 | |
663522cb KH |
3941 | if (GET_CODE (in2) == NOT) |
3942 | in2 = XEXP (in2, 0); | |
3943 | else if (GET_CODE (in2) == CONST_INT | |
3944 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
3945 | in2 = GEN_INT (GET_MODE_MASK (mode) & ~INTVAL (in2)); | |
3946 | else | |
3947 | in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2); | |
3948 | ||
3949 | if (GET_CODE (in2) == NOT) | |
3950 | { | |
3951 | rtx tem = in2; | |
3952 | in2 = in1; in1 = tem; | |
3953 | } | |
3954 | ||
3955 | return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR, | |
3956 | mode, in1, in2); | |
3957 | } | |
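/* For example, (not (and X (not Y))) becomes (ior (not X) Y), and
   (not (ior X Y)) becomes an AND of the two negated operands; when
   only one operand ends up negated, it is placed first to match the
   and-not/nand patterns.  */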
230d793d RS |
3958 | break; |
3959 | ||
3960 | case NEG: | |
3961 | /* (neg (plus X 1)) can become (not X). */ | |
3962 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
3963 | && XEXP (XEXP (x, 0), 1) == const1_rtx) | |
8079805d | 3964 | return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0)); |
230d793d RS |
3965 | |
3966 | /* Similarly, (neg (not X)) is (plus X 1). */ | |
3967 | if (GET_CODE (XEXP (x, 0)) == NOT) | |
8079805d | 3968 | return plus_constant (XEXP (XEXP (x, 0), 0), 1); |
230d793d | 3969 | |
230d793d RS |
3970 | /* (neg (minus X Y)) can become (minus Y X). */ |
3971 | if (GET_CODE (XEXP (x, 0)) == MINUS | |
3ad2180a | 3972 | && (! FLOAT_MODE_P (mode) |
0f41302f | 3973 | /* x-y != -(y-x) with IEEE floating point. */ |
7e2a0d8e RK |
3974 | || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT |
3975 | || flag_fast_math)) | |
8079805d RK |
3976 | return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1), |
3977 | XEXP (XEXP (x, 0), 0)); | |
230d793d | 3978 | |
0f41302f | 3979 | /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */ |
d0ab8cd3 | 3980 | if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx |
951553af | 3981 | && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1) |
8079805d | 3982 | return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx); |
d0ab8cd3 | 3983 | |
230d793d RS |
3984 | /* NEG commutes with ASHIFT since it is multiplication. Only do this |
3985 | if we can then eliminate the NEG (e.g., | |
3986 | if the operand is a constant). */ | |
3987 | ||
3988 | if (GET_CODE (XEXP (x, 0)) == ASHIFT) | |
3989 | { | |
3990 | temp = simplify_unary_operation (NEG, mode, | |
3991 | XEXP (XEXP (x, 0), 0), mode); | |
3992 | if (temp) | |
3993 | { | |
3994 | SUBST (XEXP (XEXP (x, 0), 0), temp); | |
3995 | return XEXP (x, 0); | |
3996 | } | |
3997 | } | |
3998 | ||
3999 | temp = expand_compound_operation (XEXP (x, 0)); | |
4000 | ||
4001 | /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be | |
4002 | replaced by (lshiftrt X C). This will convert | |
4003 | (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */ | |
4004 | ||
4005 | if (GET_CODE (temp) == ASHIFTRT | |
4006 | && GET_CODE (XEXP (temp, 1)) == CONST_INT | |
4007 | && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1) | |
8079805d RK |
4008 | return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0), |
4009 | INTVAL (XEXP (temp, 1))); | |
230d793d | 4010 | |
951553af | 4011 | /* If X has only a single bit that might be nonzero, say, bit I, convert |
230d793d RS |
4012 | (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of |
4013 | MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to | |
4014 | (sign_extract X 1 Y). But only do this if TEMP isn't a register | |
4015 | or a SUBREG of one since we'd be making the expression more | |
4016 | complex if it was just a register. */ | |
4017 | ||
4018 | if (GET_CODE (temp) != REG | |
4019 | && ! (GET_CODE (temp) == SUBREG | |
4020 | && GET_CODE (SUBREG_REG (temp)) == REG) | |
951553af | 4021 | && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0) |
230d793d RS |
4022 | { |
4023 | rtx temp1 = simplify_shift_const | |
5f4f0e22 CH |
4024 | (NULL_RTX, ASHIFTRT, mode, |
4025 | simplify_shift_const (NULL_RTX, ASHIFT, mode, temp, | |
230d793d RS |
4026 | GET_MODE_BITSIZE (mode) - 1 - i), |
4027 | GET_MODE_BITSIZE (mode) - 1 - i); | |
4028 | ||
4029 | /* If all we did was surround TEMP with the two shifts, we | |
4030 | haven't improved anything, so don't use it. Otherwise, | |
4031 | we are better off with TEMP1. */ | |
4032 | if (GET_CODE (temp1) != ASHIFTRT | |
4033 | || GET_CODE (XEXP (temp1, 0)) != ASHIFT | |
4034 | || XEXP (XEXP (temp1, 0), 0) != temp) | |
8079805d | 4035 | return temp1; |
230d793d RS |
4036 | } |
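/* Illustrative instance: in SImode, if only bit 3 of TEMP can be
   nonzero (TEMP is 0 or 8), then I == 3 and (neg TEMP) becomes
   (ashiftrt (ashift TEMP 28) 28), which is 0 or -8 as required.  */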
4037 | break; | |
4038 | ||
2ca9ae17 | 4039 | case TRUNCATE: |
e30fb98f JL |
4040 | /* We can't handle truncation to a partial integer mode here |
4041 | because we don't know the real bitsize of the partial | |
4042 | integer mode. */ | |
4043 | if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT) | |
4044 | break; | |
4045 | ||
80608e27 JL |
4046 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
4047 | && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), | |
4048 | GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))) | |
2ca9ae17 JW |
4049 | SUBST (XEXP (x, 0), |
4050 | force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)), | |
4051 | GET_MODE_MASK (mode), NULL_RTX, 0)); | |
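/* E.g., for (truncate:QI X) only the low 8 bits of X matter, so X is
   rewritten under a mask of 0xff, which often lets an enclosing AND
   or shift inside X disappear.  */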
0f13a422 ILT |
4052 | |
4053 | /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */ | |
4054 | if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND | |
4055 | || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND) | |
4056 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode) | |
4057 | return XEXP (XEXP (x, 0), 0); | |
4058 | ||
4059 | /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is | |
4060 | (OP:SI foo:SI) if OP is NEG or ABS. */ | |
4061 | if ((GET_CODE (XEXP (x, 0)) == ABS | |
4062 | || GET_CODE (XEXP (x, 0)) == NEG) | |
4063 | && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND | |
4064 | || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND) | |
4065 | && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode) | |
4066 | return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode, | |
4067 | XEXP (XEXP (XEXP (x, 0), 0), 0)); | |
4068 | ||
4069 | /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is | |
4070 | (truncate:SI x). */ | |
4071 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
4072 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE | |
4073 | && subreg_lowpart_p (XEXP (x, 0))) | |
4074 | return SUBREG_REG (XEXP (x, 0)); | |
4075 | ||
4076 | /* If we know that the value is already truncated, we can | |
14a774a9 RK |
4077 | replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION |
4078 | is nonzero for the corresponding modes. But don't do this | |
4079 | for an (LSHIFTRT (MULT ...)) since this will cause problems | |
4080 | with the umulXi3_highpart patterns. */ | |
6a992214 JL |
4081 | if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), |
4082 | GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))) | |
4083 | && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))) | |
14a774a9 RK |
4084 | >= GET_MODE_BITSIZE (mode) + 1 |
4085 | && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
4086 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT)) | |
0f13a422 ILT |
4087 | return gen_lowpart_for_combine (mode, XEXP (x, 0)); |
4088 | ||
4089 | /* A truncate of a comparison can be replaced with a subreg if | |
4090 | STORE_FLAG_VALUE permits. This is like the previous test, | |
4091 | but it works even if the comparison is done in a mode larger | |
4092 | than HOST_BITS_PER_WIDE_INT. */ | |
4093 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
4094 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<' | |
663522cb | 4095 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0) |
0f13a422 ILT |
4096 | return gen_lowpart_for_combine (mode, XEXP (x, 0)); |
4097 | ||
4098 | /* Similarly, a truncate of a register whose value is a | |
4099 | comparison can be replaced with a subreg if STORE_FLAG_VALUE | |
4100 | permits. */ | |
4101 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
663522cb | 4102 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0 |
0f13a422 ILT |
4103 | && (temp = get_last_value (XEXP (x, 0))) |
4104 | && GET_RTX_CLASS (GET_CODE (temp)) == '<') | |
4105 | return gen_lowpart_for_combine (mode, XEXP (x, 0)); | |
4106 | ||
2ca9ae17 JW |
4107 | break; |
4108 | ||
230d793d RS |
4109 | case FLOAT_TRUNCATE: |
4110 | /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */ | |
4111 | if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND | |
4112 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode) | |
663522cb | 4113 | return XEXP (XEXP (x, 0), 0); |
4635f748 RK |
4114 | |
4115 | /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is | |
4116 | (OP:SF foo:SF) if OP is NEG or ABS. */ | |
4117 | if ((GET_CODE (XEXP (x, 0)) == ABS | |
4118 | || GET_CODE (XEXP (x, 0)) == NEG) | |
4119 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND | |
4120 | && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode) | |
0c1c8ea6 RK |
4121 | return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode, |
4122 | XEXP (XEXP (XEXP (x, 0), 0), 0)); | |
1d12df72 RK |
4123 | |
4124 | /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0)) | |
4125 | is (float_truncate:SF x). */ | |
4126 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
4127 | && subreg_lowpart_p (XEXP (x, 0)) | |
4128 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE) | |
4129 | return SUBREG_REG (XEXP (x, 0)); | |
663522cb | 4130 | break; |
230d793d RS |
4131 | |
4132 | #ifdef HAVE_cc0 | |
4133 | case COMPARE: | |
4134 | /* Convert (compare FOO (const_int 0)) to FOO unless we aren't | |
4135 | using cc0, in which case we want to leave it as a COMPARE | |
4136 | so we can distinguish it from a register-register-copy. */ | |
4137 | if (XEXP (x, 1) == const0_rtx) | |
4138 | return XEXP (x, 0); | |
4139 | ||
4140 | /* In IEEE floating point, x-0 is not the same as x. */ | |
4141 | if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT | |
7e2a0d8e RK |
4142 | || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))) |
4143 | || flag_fast_math) | |
230d793d RS |
4144 | && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0)))) |
4145 | return XEXP (x, 0); | |
4146 | break; | |
4147 | #endif | |
4148 | ||
4149 | case CONST: | |
4150 | /* (const (const X)) can become (const X). Do it this way rather than | |
4151 | returning the inner CONST since CONST can be shared with a | |
4152 | REG_EQUAL note. */ | |
4153 | if (GET_CODE (XEXP (x, 0)) == CONST) | |
4154 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
4155 | break; | |
4156 | ||
4157 | #ifdef HAVE_lo_sum | |
4158 | case LO_SUM: | |
4159 | /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we | |
4160 | can add in an offset. find_split_point will split this address up | |
4161 | again if it doesn't match. */ | |
4162 | if (GET_CODE (XEXP (x, 0)) == HIGH | |
4163 | && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))) | |
4164 | return XEXP (x, 1); | |
4165 | break; | |
4166 | #endif | |
4167 | ||
4168 | case PLUS: | |
4169 | /* If we have (plus (plus A const) B), associate it so that CONST is | |
4170 | outermost. That's because that's the way indexed addresses are | |
4171 | supposed to appear. This code used to check many more cases, but | |
4172 | they are now checked elsewhere. */ | |
4173 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
4174 | && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1))) | |
4175 | return gen_binary (PLUS, mode, | |
4176 | gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), | |
4177 | XEXP (x, 1)), | |
4178 | XEXP (XEXP (x, 0), 1)); | |
4179 | ||
4180 | /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>) | |
4181 | when c is (const_int (pow2 + 1) / 2) is a sign extension of a | |
4182 | bit-field and can be replaced by either a sign_extend or a | |
e6380233 JL |
4183 | sign_extract. The `and' may be a zero_extend and the two |
4184 | <c>, -<c> constants may be reversed. */ | |
230d793d RS |
4185 | if (GET_CODE (XEXP (x, 0)) == XOR |
4186 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
4187 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
663522cb | 4188 | && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1)) |
e6380233 JL |
4189 | && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0 |
4190 | || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0) | |
5f4f0e22 | 4191 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
230d793d RS |
4192 | && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND |
4193 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT | |
4194 | && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) | |
5f4f0e22 | 4195 | == ((HOST_WIDE_INT) 1 << (i + 1)) - 1)) |
230d793d RS |
4196 | || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND |
4197 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0))) | |
770ae6cc | 4198 | == (unsigned int) i + 1)))) |
8079805d RK |
4199 | return simplify_shift_const |
4200 | (NULL_RTX, ASHIFTRT, mode, | |
4201 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
4202 | XEXP (XEXP (XEXP (x, 0), 0), 0), | |
4203 | GET_MODE_BITSIZE (mode) - (i + 1)), | |
4204 | GET_MODE_BITSIZE (mode) - (i + 1)); | |
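/* Concrete instance (illustrative): in SImode with a 4-bit field,
   (plus (xor (and X 15) 8) -8) sign-extends the low 4 bits of X;
   here I == 3, so the result is (ashiftrt (ashift X 28) 28).  */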
230d793d | 4205 | |
bc0776c6 RK |
4206 | /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if |
4207 | C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE | |
4208 | is 1. This produces better code than the alternative immediately | |
4209 | below. */ | |
4210 | if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<' | |
4211 | && reversible_comparison_p (XEXP (x, 0)) | |
4212 | && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx) | |
4213 | || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))) | |
8079805d | 4214 | return |
0c1c8ea6 | 4215 | gen_unary (NEG, mode, mode, |
8079805d RK |
4216 | gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))), |
4217 | mode, XEXP (XEXP (x, 0), 0), | |
4218 | XEXP (XEXP (x, 0), 1))); | |
bc0776c6 RK |
4219 | |
4220 | /* If only the low-order bit of X is possibly nonzero, (plus x -1) | |
230d793d RS |
4221 | can become (ashiftrt (ashift (xor x 1) C) C) where C is |
4222 | the bitsize of the mode - 1. This allows simplification of | |
4223 | "a = (b & 8) == 0;" */ | |
4224 | if (XEXP (x, 1) == constm1_rtx | |
4225 | && GET_CODE (XEXP (x, 0)) != REG | |
4226 | && ! (GET_CODE (XEXP (x, 0)) == SUBREG | |
4227 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG) | |
951553af | 4228 | && nonzero_bits (XEXP (x, 0), mode) == 1) |
8079805d RK |
4229 | return simplify_shift_const (NULL_RTX, ASHIFTRT, mode, |
4230 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
4231 | gen_rtx_combine (XOR, mode, | |
4232 | XEXP (x, 0), const1_rtx), | |
4233 | GET_MODE_BITSIZE (mode) - 1), | |
4234 | GET_MODE_BITSIZE (mode) - 1); | |
02f4ada4 RK |
4235 | |
4236 | /* If we are adding two things that have no bits in common, convert | |
4237 | the addition into an IOR. This will often be further simplified, | |
4238 | for example in cases like ((a & 1) + (a & 2)), which can | |
4239 | become a & 3. */ | |
4240 | ||
ac49a949 | 4241 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
951553af RK |
4242 | && (nonzero_bits (XEXP (x, 0), mode) |
4243 | & nonzero_bits (XEXP (x, 1), mode)) == 0) | |
085f1714 RH |
4244 | { |
4245 | /* Try to simplify the expression further. */ | |
4246 | rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1)); | |
4247 | temp = combine_simplify_rtx (tor, mode, last, in_dest); | |
4248 | ||
4249 | /* If we could, great. If not, do not go ahead with the IOR | |
4250 | replacement, since PLUS appears in many special purpose | |
4251 | address arithmetic instructions. */ | |
4252 | if (GET_CODE (temp) != CLOBBER && temp != tor) | |
4253 | return temp; | |
4254 | } | |
230d793d RS |
4255 | break; |
4256 | ||
4257 | case MINUS: | |
0802d516 RK |
4258 | /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done |
4259 | by reversing the comparison code if valid. */ | |
4260 | if (STORE_FLAG_VALUE == 1 | |
4261 | && XEXP (x, 0) == const1_rtx | |
5109d49f RK |
4262 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<' |
4263 | && reversible_comparison_p (XEXP (x, 1))) | |
663522cb KH |
4264 | return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))), mode, |
4265 | XEXP (XEXP (x, 1), 0), | |
4266 | XEXP (XEXP (x, 1), 1)); | |
5109d49f | 4267 | |
230d793d RS |
4268 | /* (minus <foo> (and <foo> (const_int -pow2))) becomes |
4269 | (and <foo> (const_int pow2-1)) */ | |
4270 | if (GET_CODE (XEXP (x, 1)) == AND | |
4271 | && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT | |
663522cb | 4272 | && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0 |
230d793d | 4273 | && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0))) |
8079805d | 4274 | return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0), |
663522cb | 4275 | -INTVAL (XEXP (XEXP (x, 1), 1)) - 1); |
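/* In scalar terms (illustrative): with pow2 == 8,
   X - (X & -8) == X & 7, i.e. the bits of X below the power of
   two.  */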
7bef8680 RK |
4276 | |
4277 | /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for | |
4278 | integers. */ | |
4279 | if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)) | |
8079805d RK |
4280 | return gen_binary (MINUS, mode, |
4281 | gen_binary (MINUS, mode, XEXP (x, 0), | |
4282 | XEXP (XEXP (x, 1), 0)), | |
4283 | XEXP (XEXP (x, 1), 1)); | |
230d793d RS |
4284 | break; |
4285 | ||
4286 | case MULT: | |
4287 | /* If we have (mult (plus A B) C), apply the distributive law and then | |
4288 | the inverse distributive law to see if things simplify. This | |
4289 | occurs mostly in addresses, often when unrolling loops. */ | |
4290 | ||
4291 | if (GET_CODE (XEXP (x, 0)) == PLUS) | |
4292 | { | |
4293 | x = apply_distributive_law | |
4294 | (gen_binary (PLUS, mode, | |
4295 | gen_binary (MULT, mode, | |
4296 | XEXP (XEXP (x, 0), 0), XEXP (x, 1)), | |
4297 | gen_binary (MULT, mode, | |
3749f4ca BS |
4298 | XEXP (XEXP (x, 0), 1), |
4299 | copy_rtx (XEXP (x, 1))))); | |
230d793d RS |
4300 | |
4301 | if (GET_CODE (x) != MULT) | |
8079805d | 4302 | return x; |
230d793d | 4303 | } |
230d793d RS |
4304 | break; |
4305 | ||
4306 | case UDIV: | |
4307 | /* If this is a divide by a power of two, treat it as a shift if | |
4308 | its first operand is a shift. */ | |
4309 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
4310 | && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0 | |
4311 | && (GET_CODE (XEXP (x, 0)) == ASHIFT | |
4312 | || GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
4313 | || GET_CODE (XEXP (x, 0)) == ASHIFTRT | |
4314 | || GET_CODE (XEXP (x, 0)) == ROTATE | |
4315 | || GET_CODE (XEXP (x, 0)) == ROTATERT)) | |
8079805d | 4316 | return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i); |
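/* E.g., (udiv (lshiftrt X 2) (const_int 8)) is rewritten as
   (lshiftrt (lshiftrt X 2) 3), which simplify_shift_const can then
   fold into (lshiftrt X 5).  */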
230d793d RS |
4317 | break; |
4318 | ||
4319 | case EQ: case NE: | |
4320 | case GT: case GTU: case GE: case GEU: | |
4321 | case LT: case LTU: case LE: case LEU: | |
4322 | /* If the first operand is a condition code, we can't do anything | |
4323 | with it. */ | |
4324 | if (GET_CODE (XEXP (x, 0)) == COMPARE | |
4325 | || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC | |
4326 | #ifdef HAVE_cc0 | |
4327 | && XEXP (x, 0) != cc0_rtx | |
4328 | #endif | |
663522cb | 4329 | )) |
230d793d RS |
4330 | { |
4331 | rtx op0 = XEXP (x, 0); | |
4332 | rtx op1 = XEXP (x, 1); | |
4333 | enum rtx_code new_code; | |
4334 | ||
4335 | if (GET_CODE (op0) == COMPARE) | |
4336 | op1 = XEXP (op0, 1), op0 = XEXP (op0, 0); | |
4337 | ||
4338 | /* Simplify our comparison, if possible. */ | |
4339 | new_code = simplify_comparison (code, &op0, &op1); | |
4340 | ||
230d793d | 4341 | /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X |
951553af | 4342 | if only the low-order bit is possibly nonzero in X (such as when |
5109d49f RK |
4343 | X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to |
4344 | (xor X 1) or (minus 1 X); we use the former. Finally, if X is | |
4345 | known to be either 0 or -1, NE becomes a NEG and EQ becomes | |
4346 | (plus X 1). | |
4347 | ||
4348 | Remove any ZERO_EXTRACT we made when thinking this was a | |
4349 | comparison. It may now be simpler to use, e.g., an AND. If a | |
4350 | ZERO_EXTRACT is indeed appropriate, it will be placed back by | |
4351 | the call to make_compound_operation in the SET case. */ | |
4352 | ||
0802d516 RK |
4353 | if (STORE_FLAG_VALUE == 1 |
4354 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
4355 | && op1 == const0_rtx && nonzero_bits (op0, mode) == 1) | |
818b11b9 RK |
4356 | return gen_lowpart_for_combine (mode, |
4357 | expand_compound_operation (op0)); | |
5109d49f | 4358 | |
0802d516 RK |
4359 | else if (STORE_FLAG_VALUE == 1 |
4360 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f RK |
4361 | && op1 == const0_rtx |
4362 | && (num_sign_bit_copies (op0, mode) | |
4363 | == GET_MODE_BITSIZE (mode))) | |
4364 | { | |
4365 | op0 = expand_compound_operation (op0); | |
0c1c8ea6 | 4366 | return gen_unary (NEG, mode, mode, |
8079805d | 4367 | gen_lowpart_for_combine (mode, op0)); |
5109d49f RK |
4368 | } |
4369 | ||
0802d516 RK |
4370 | else if (STORE_FLAG_VALUE == 1 |
4371 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
230d793d | 4372 | && op1 == const0_rtx |
5109d49f | 4373 | && nonzero_bits (op0, mode) == 1) |
818b11b9 RK |
4374 | { |
4375 | op0 = expand_compound_operation (op0); | |
8079805d RK |
4376 | return gen_binary (XOR, mode, |
4377 | gen_lowpart_for_combine (mode, op0), | |
4378 | const1_rtx); | |
5109d49f | 4379 | } |
818b11b9 | 4380 | |
0802d516 RK |
4381 | else if (STORE_FLAG_VALUE == 1 |
4382 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f RK |
4383 | && op1 == const0_rtx |
4384 | && (num_sign_bit_copies (op0, mode) | |
4385 | == GET_MODE_BITSIZE (mode))) | |
4386 | { | |
4387 | op0 = expand_compound_operation (op0); | |
8079805d | 4388 | return plus_constant (gen_lowpart_for_combine (mode, op0), 1); |
818b11b9 | 4389 | } |
230d793d | 4390 | |
5109d49f RK |
4391 | /* If STORE_FLAG_VALUE is -1, we have cases similar to |
4392 | those above. */ | |
0802d516 RK |
4393 | if (STORE_FLAG_VALUE == -1 |
4394 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
230d793d | 4395 | && op1 == const0_rtx |
5109d49f RK |
4396 | && (num_sign_bit_copies (op0, mode) |
4397 | == GET_MODE_BITSIZE (mode))) | |
4398 | return gen_lowpart_for_combine (mode, | |
4399 | expand_compound_operation (op0)); | |
4400 | ||
0802d516 RK |
4401 | else if (STORE_FLAG_VALUE == -1 |
4402 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f RK |
4403 | && op1 == const0_rtx |
4404 | && nonzero_bits (op0, mode) == 1) | |
4405 | { | |
4406 | op0 = expand_compound_operation (op0); | |
0c1c8ea6 | 4407 | return gen_unary (NEG, mode, mode, |
8079805d | 4408 | gen_lowpart_for_combine (mode, op0)); |
5109d49f RK |
4409 | } |
4410 | ||
0802d516 RK |
4411 | else if (STORE_FLAG_VALUE == -1 |
4412 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f RK |
4413 | && op1 == const0_rtx |
4414 | && (num_sign_bit_copies (op0, mode) | |
4415 | == GET_MODE_BITSIZE (mode))) | |
230d793d | 4416 | { |
818b11b9 | 4417 | op0 = expand_compound_operation (op0); |
0c1c8ea6 | 4418 | return gen_unary (NOT, mode, mode, |
8079805d | 4419 | gen_lowpart_for_combine (mode, op0)); |
5109d49f RK |
4420 | } |
4421 | ||
4422 | /* If X is 0/1, (eq X 0) is X-1. */ | |
0802d516 RK |
4423 | else if (STORE_FLAG_VALUE == -1 |
4424 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f RK |
4425 | && op1 == const0_rtx |
4426 | && nonzero_bits (op0, mode) == 1) | |
4427 | { | |
4428 | op0 = expand_compound_operation (op0); | |
8079805d | 4429 | return plus_constant (gen_lowpart_for_combine (mode, op0), -1); |
230d793d | 4430 | } |
230d793d RS |
4431 | |
4432 | /* If STORE_FLAG_VALUE says to just test the sign bit and X has just | |
951553af RK |
4433 | one bit that might be nonzero, we can convert (ne x 0) to |
4434 | (ashift x c) where C puts the bit in the sign bit. Remove any | |
4435 | AND with STORE_FLAG_VALUE when we are done, since we are only | |
4436 | going to test the sign bit. */ | |
3f508eca | 4437 | if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT |
5f4f0e22 | 4438 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
0802d516 | 4439 | && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode)) |
e51712db | 4440 | == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1)) |
230d793d RS |
4441 | && op1 == const0_rtx |
4442 | && mode == GET_MODE (op0) | |
5109d49f | 4443 | && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0) |
230d793d | 4444 | { |
818b11b9 RK |
4445 | x = simplify_shift_const (NULL_RTX, ASHIFT, mode, |
4446 | expand_compound_operation (op0), | |
230d793d RS |
4447 | GET_MODE_BITSIZE (mode) - 1 - i); |
4448 | if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx) | |
4449 | return XEXP (x, 0); | |
4450 | else | |
4451 | return x; | |
4452 | } | |
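/* Sketch (illustrative): if STORE_FLAG_VALUE is 0x80000000 in SImode
   and only bit 3 of OP0 can be nonzero, (ne OP0 0) becomes
   (ashift OP0 28), moving bit 3 into the sign bit; the AND with
   STORE_FLAG_VALUE can be dropped because only the sign bit will be
   tested.  */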
4453 | ||
4454 | /* If the code changed, return a whole new comparison. */ | |
4455 | if (new_code != code) | |
4456 | return gen_rtx_combine (new_code, mode, op0, op1); | |
4457 | ||
663522cb | 4458 | /* Otherwise, keep this operation, but maybe change its operands. |
230d793d RS |
4459 | This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */ |
4460 | SUBST (XEXP (x, 0), op0); | |
4461 | SUBST (XEXP (x, 1), op1); | |
4462 | } | |
4463 | break; | |
663522cb | 4464 | |
230d793d | 4465 | case IF_THEN_ELSE: |
8079805d | 4466 | return simplify_if_then_else (x); |
9210df58 | 4467 | |
8079805d RK |
4468 | case ZERO_EXTRACT: |
4469 | case SIGN_EXTRACT: | |
4470 | case ZERO_EXTEND: | |
4471 | case SIGN_EXTEND: | |
0f41302f | 4472 | /* If we are processing SET_DEST, we are done. */ |
8079805d RK |
4473 | if (in_dest) |
4474 | return x; | |
d0ab8cd3 | 4475 | |
8079805d | 4476 | return expand_compound_operation (x); |
d0ab8cd3 | 4477 | |
8079805d RK |
4478 | case SET: |
4479 | return simplify_set (x); | |
1a26b032 | 4480 | |
8079805d RK |
4481 | case AND: |
4482 | case IOR: | |
4483 | case XOR: | |
4484 | return simplify_logical (x, last); | |
d0ab8cd3 | 4485 | |
663522cb | 4486 | case ABS: |
8079805d RK |
4487 | /* (abs (neg <foo>)) -> (abs <foo>) */ |
4488 | if (GET_CODE (XEXP (x, 0)) == NEG) | |
4489 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
1a26b032 | 4490 | |
b472527b JL |
4491 | /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS), |
4492 | do nothing. */ | |
4493 | if (GET_MODE (XEXP (x, 0)) == VOIDmode) | |
4494 | break; | |
f40421ce | 4495 | |
8079805d RK |
4496 | /* If operand is something known to be positive, ignore the ABS. */ |
4497 | if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS | |
4498 | || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
4499 | <= HOST_BITS_PER_WIDE_INT) | |
4500 | && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0))) | |
4501 | & ((HOST_WIDE_INT) 1 | |
4502 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))) | |
4503 | == 0))) | |
4504 | return XEXP (x, 0); | |
1a26b032 | 4505 | |
8079805d RK |
4506 | /* If operand is known to be only -1 or 0, convert ABS to NEG. */ |
4507 | if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode)) | |
4508 | return gen_rtx_combine (NEG, mode, XEXP (x, 0)); | |
1a26b032 | 4509 | |
8079805d | 4510 | break; |
1a26b032 | 4511 | |
8079805d RK |
4512 | case FFS: |
4513 | /* (ffs (*_extend <X>)) = (ffs <X>) */ | |
4514 | if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND | |
4515 | || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND) | |
4516 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
4517 | break; | |
1a26b032 | 4518 | |
8079805d RK |
4519 | case FLOAT: |
4520 | /* (float (sign_extend <X>)) = (float <X>). */ | |
4521 | if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND) | |
4522 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
4523 | break; | |
1a26b032 | 4524 | |
8079805d RK |
4525 | case ASHIFT: |
4526 | case LSHIFTRT: | |
4527 | case ASHIFTRT: | |
4528 | case ROTATE: | |
4529 | case ROTATERT: | |
4530 | /* If this is a shift by a constant amount, simplify it. */ | |
4531 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) | |
663522cb | 4532 | return simplify_shift_const (x, code, mode, XEXP (x, 0), |
8079805d RK |
4533 | INTVAL (XEXP (x, 1))); |
4534 | ||
4535 | #ifdef SHIFT_COUNT_TRUNCATED | |
4536 | else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG) | |
4537 | SUBST (XEXP (x, 1), | |
4538 | force_to_mode (XEXP (x, 1), GET_MODE (x), | |
663522cb | 4539 | ((HOST_WIDE_INT) 1 |
8079805d RK |
4540 | << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x)))) |
4541 | - 1, | |
4542 | NULL_RTX, 0)); | |
4543 | #endif | |
4544 | ||
4545 | break; | |
e9a25f70 JL |
4546 | |
4547 | default: | |
4548 | break; | |
8079805d RK |
4549 | } |
4550 | ||
4551 | return x; | |
4552 | } | |
4553 | \f | |
4554 | /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */ | |
5109d49f | 4555 | |
8079805d RK |
4556 | static rtx |
4557 | simplify_if_then_else (x) | |
4558 | rtx x; | |
4559 | { | |
4560 | enum machine_mode mode = GET_MODE (x); | |
4561 | rtx cond = XEXP (x, 0); | |
4562 | rtx true = XEXP (x, 1); | |
4563 | rtx false = XEXP (x, 2); | |
4564 | enum rtx_code true_code = GET_CODE (cond); | |
4565 | int comparison_p = GET_RTX_CLASS (true_code) == '<'; | |
4566 | rtx temp; | |
4567 | int i; | |
4568 | ||
0f41302f | 4569 | /* Simplify storing of the truth value. */ |
8079805d RK |
4570 | if (comparison_p && true == const_true_rtx && false == const0_rtx) |
4571 | return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1)); | |
663522cb | 4572 | |
0f41302f | 4573 | /* Also when the truth value has to be reversed. */ |
8079805d RK |
4574 | if (comparison_p && reversible_comparison_p (cond) |
4575 | && true == const0_rtx && false == const_true_rtx) | |
4576 | return gen_binary (reverse_condition (true_code), | |
4577 | mode, XEXP (cond, 0), XEXP (cond, 1)); | |
4578 | ||
4579 | /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used | |
4580 | in it is being compared against certain values. Get the true and false | |
4581 | comparisons and see if that says anything about the value of each arm. */ | |
4582 | ||
4583 | if (comparison_p && reversible_comparison_p (cond) | |
4584 | && GET_CODE (XEXP (cond, 0)) == REG) | |
4585 | { | |
4586 | HOST_WIDE_INT nzb; | |
4587 | rtx from = XEXP (cond, 0); | |
4588 | enum rtx_code false_code = reverse_condition (true_code); | |
4589 | rtx true_val = XEXP (cond, 1); | |
4590 | rtx false_val = true_val; | |
4591 | int swapped = 0; | |
9210df58 | 4592 | |
8079805d | 4593 | /* If FALSE_CODE is EQ, swap the codes and arms. */ |
5109d49f | 4594 | |
8079805d | 4595 | if (false_code == EQ) |
1a26b032 | 4596 | { |
8079805d RK |
4597 | swapped = 1, true_code = EQ, false_code = NE; |
4598 | temp = true, true = false, false = temp; | |
4599 | } | |
5109d49f | 4600 | |
8079805d RK |
4601 | /* If we are comparing against zero and the expression being tested has |
4602 | only a single bit that might be nonzero, that is its value when it is | |
4603 | not equal to zero. Similarly if it is known to be -1 or 0. */ | |
4604 | ||
4605 | if (true_code == EQ && true_val == const0_rtx | |
4606 | && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0) | |
4607 | false_code = EQ, false_val = GEN_INT (nzb); | |
4608 | else if (true_code == EQ && true_val == const0_rtx | |
4609 | && (num_sign_bit_copies (from, GET_MODE (from)) | |
4610 | == GET_MODE_BITSIZE (GET_MODE (from)))) | |
4611 | false_code = EQ, false_val = constm1_rtx; | |
4612 | ||
4613 | /* Now simplify an arm if we know the value of the register in the | |
4614 | branch and it is used in the arm. Be careful due to the potential | |
4615 | of locally-shared RTL. */ | |
4616 | ||
4617 | if (reg_mentioned_p (from, true)) | |
4618 | true = subst (known_cond (copy_rtx (true), true_code, from, true_val), | |
4619 | pc_rtx, pc_rtx, 0, 0); | |
4620 | if (reg_mentioned_p (from, false)) | |
4621 | false = subst (known_cond (copy_rtx (false), false_code, | |
4622 | from, false_val), | |
4623 | pc_rtx, pc_rtx, 0, 0); | |
4624 | ||
4625 | SUBST (XEXP (x, 1), swapped ? false : true); | |
4626 | SUBST (XEXP (x, 2), swapped ? true : false); | |
4627 | ||
4628 | true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond); | |
4629 | } | |
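/* For instance (with an illustrative pseudo), in
   (if_then_else (eq (reg 70) 0) A B) where only the low bit of
   (reg 70) can be nonzero, the register must be 1 in the false arm,
   so known_cond can fold its uses there to (const_int 1).  */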
5109d49f | 4630 | |
8079805d RK |
4631 | /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be |
4632 | reversed, do so to avoid needing two sets of patterns for | |
4633 | subtract-and-branch insns. Similarly if we have a constant in the true | |
4634 | arm, the false arm is the same as the first operand of the comparison, or | |
4635 | the false arm is more complicated than the true arm. */ | |
4636 | ||
4637 | if (comparison_p && reversible_comparison_p (cond) | |
663522cb | 4638 | && (true == pc_rtx |
8079805d RK |
4639 | || (CONSTANT_P (true) |
4640 | && GET_CODE (false) != CONST_INT && false != pc_rtx) | |
4641 | || true == const0_rtx | |
4642 | || (GET_RTX_CLASS (GET_CODE (true)) == 'o' | |
4643 | && GET_RTX_CLASS (GET_CODE (false)) != 'o') | |
4644 | || (GET_CODE (true) == SUBREG | |
4645 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o' | |
4646 | && GET_RTX_CLASS (GET_CODE (false)) != 'o') | |
4647 | || reg_mentioned_p (true, false) | |
4648 | || rtx_equal_p (false, XEXP (cond, 0)))) | |
4649 | { | |
4650 | true_code = reverse_condition (true_code); | |
4651 | SUBST (XEXP (x, 0), | |
4652 | gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0), | |
4653 | XEXP (cond, 1))); | |
5109d49f | 4654 | |
8079805d RK |
4655 | SUBST (XEXP (x, 1), false); |
4656 | SUBST (XEXP (x, 2), true); | |
1a26b032 | 4657 | |
8079805d | 4658 | temp = true, true = false, false = temp, cond = XEXP (x, 0); |
bb821298 | 4659 | |
0f41302f | 4660 | /* It is possible that the conditional has been simplified out. */ |
bb821298 RK |
4661 | true_code = GET_CODE (cond); |
4662 | comparison_p = GET_RTX_CLASS (true_code) == '<'; | |
8079805d | 4663 | } |
abe6e52f | 4664 | |
8079805d | 4665 | /* If the two arms are identical, we don't need the comparison. */ |
1a26b032 | 4666 | |
8079805d RK |
4667 | if (rtx_equal_p (true, false) && ! side_effects_p (cond)) |
4668 | return true; | |
1a26b032 | 4669 | |
5be669c7 RK |
4670 | /* Convert a == b ? b : a to "a". */ |
4671 | if (true_code == EQ && ! side_effects_p (cond) | |
4672 | && rtx_equal_p (XEXP (cond, 0), false) | |
4673 | && rtx_equal_p (XEXP (cond, 1), true)) | |
4674 | return false; | |
4675 | else if (true_code == NE && ! side_effects_p (cond) | |
4676 | && rtx_equal_p (XEXP (cond, 0), true) | |
4677 | && rtx_equal_p (XEXP (cond, 1), false)) | |
4678 | return true; | |
4679 | ||
8079805d RK |
4680 | /* Look for cases where we have (abs x) or (neg (abs X)). */ |
4681 | ||
4682 | if (GET_MODE_CLASS (mode) == MODE_INT | |
4683 | && GET_CODE (false) == NEG | |
4684 | && rtx_equal_p (true, XEXP (false, 0)) | |
4685 | && comparison_p | |
4686 | && rtx_equal_p (true, XEXP (cond, 0)) | |
4687 | && ! side_effects_p (true)) | |
4688 | switch (true_code) | |
4689 | { | |
4690 | case GT: | |
4691 | case GE: | |
0c1c8ea6 | 4692 | return gen_unary (ABS, mode, mode, true); |
8079805d RK |
4693 | case LT: |
4694 | case LE: | |
0c1c8ea6 | 4695 | return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true)); |
e9a25f70 JL |
4696 | default: |
4697 | break; | |
8079805d RK |
4698 | } |
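/* E.g., (if_then_else (ge X 0) X (neg X)) collapses to (abs X), and
   (if_then_else (lt X 0) X (neg X)) to (neg (abs X)).  */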
4699 | ||
4700 | /* Look for MIN or MAX. */ | |
4701 | ||
34c8be72 | 4702 | if ((! FLOAT_MODE_P (mode) || flag_fast_math) |
8079805d RK |
4703 | && comparison_p |
4704 | && rtx_equal_p (XEXP (cond, 0), true) | |
4705 | && rtx_equal_p (XEXP (cond, 1), false) | |
4706 | && ! side_effects_p (cond)) | |
4707 | switch (true_code) | |
4708 | { | |
4709 | case GE: | |
4710 | case GT: | |
4711 | return gen_binary (SMAX, mode, true, false); | |
4712 | case LE: | |
4713 | case LT: | |
4714 | return gen_binary (SMIN, mode, true, false); | |
4715 | case GEU: | |
4716 | case GTU: | |
4717 | return gen_binary (UMAX, mode, true, false); | |
4718 | case LEU: | |
4719 | case LTU: | |
4720 | return gen_binary (UMIN, mode, true, false); | |
e9a25f70 JL |
4721 | default: |
4722 | break; | |
8079805d | 4723 | } |
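/* E.g., (if_then_else (gt A B) A B) is (smax A B); the GTU/LTU forms
   yield the unsigned umax/umin equivalents.  */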
663522cb | 4724 | |
8079805d RK |
4725 | /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its |
4726 | second operand is zero, this can be done as (OP Z (mult COND C2)) where | |
4727 | C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or | |
4728 | SIGN_EXTEND as long as Z is already extended (so we don't destroy it). | |
4729 | We can do this kind of thing in some cases when STORE_FLAG_VALUE is | |
0802d516 | 4730 | neither 1 or -1, but it isn't worth checking for. */ |
8079805d | 4731 | |
0802d516 RK |
4732 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
4733 | && comparison_p && mode != VOIDmode && ! side_effects_p (x)) | |
8079805d RK |
4734 | { |
4735 | rtx t = make_compound_operation (true, SET); | |
4736 | rtx f = make_compound_operation (false, SET); | |
4737 | rtx cond_op0 = XEXP (cond, 0); | |
4738 | rtx cond_op1 = XEXP (cond, 1); | |
6a651371 | 4739 | enum rtx_code op = NIL, extend_op = NIL; |
8079805d | 4740 | enum machine_mode m = mode; |
6a651371 | 4741 | rtx z = 0, c1 = NULL_RTX; |
8079805d | 4742 | |
8079805d RK |
4743 | if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS |
4744 | || GET_CODE (t) == IOR || GET_CODE (t) == XOR | |
4745 | || GET_CODE (t) == ASHIFT | |
4746 | || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT) | |
4747 | && rtx_equal_p (XEXP (t, 0), f)) | |
4748 | c1 = XEXP (t, 1), op = GET_CODE (t), z = f; | |
4749 | ||
4750 | /* If an identity-zero op is commutative, check whether there | |
0f41302f | 4751 | would be a match if we swapped the operands. */ |
8079805d RK |
4752 | else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR |
4753 | || GET_CODE (t) == XOR) | |
4754 | && rtx_equal_p (XEXP (t, 1), f)) | |
4755 | c1 = XEXP (t, 0), op = GET_CODE (t), z = f; | |
4756 | else if (GET_CODE (t) == SIGN_EXTEND | |
4757 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4758 | || GET_CODE (XEXP (t, 0)) == MINUS | |
4759 | || GET_CODE (XEXP (t, 0)) == IOR | |
4760 | || GET_CODE (XEXP (t, 0)) == XOR | |
4761 | || GET_CODE (XEXP (t, 0)) == ASHIFT | |
4762 | || GET_CODE (XEXP (t, 0)) == LSHIFTRT | |
4763 | || GET_CODE (XEXP (t, 0)) == ASHIFTRT) | |
4764 | && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG | |
4765 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 0)) | |
4766 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f) | |
4767 | && (num_sign_bit_copies (f, GET_MODE (f)) | |
4768 | > (GET_MODE_BITSIZE (mode) | |
4769 | - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0)))))) | |
4770 | { | |
4771 | c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0)); | |
4772 | extend_op = SIGN_EXTEND; | |
4773 | m = GET_MODE (XEXP (t, 0)); | |
1a26b032 | 4774 | } |
8079805d RK |
4775 | else if (GET_CODE (t) == SIGN_EXTEND |
4776 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4777 | || GET_CODE (XEXP (t, 0)) == IOR | |
4778 | || GET_CODE (XEXP (t, 0)) == XOR) | |
4779 | && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG | |
4780 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 1)) | |
4781 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f) | |
4782 | && (num_sign_bit_copies (f, GET_MODE (f)) | |
4783 | > (GET_MODE_BITSIZE (mode) | |
4784 | - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1)))))) | |
4785 | { | |
4786 | c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0)); | |
4787 | extend_op = SIGN_EXTEND; | |
4788 | m = GET_MODE (XEXP (t, 0)); | |
4789 | } | |
4790 | else if (GET_CODE (t) == ZERO_EXTEND | |
4791 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4792 | || GET_CODE (XEXP (t, 0)) == MINUS | |
4793 | || GET_CODE (XEXP (t, 0)) == IOR | |
4794 | || GET_CODE (XEXP (t, 0)) == XOR | |
4795 | || GET_CODE (XEXP (t, 0)) == ASHIFT | |
4796 | || GET_CODE (XEXP (t, 0)) == LSHIFTRT | |
4797 | || GET_CODE (XEXP (t, 0)) == ASHIFTRT) | |
4798 | && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG | |
4799 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
4800 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 0)) | |
4801 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f) | |
4802 | && ((nonzero_bits (f, GET_MODE (f)) | |
663522cb | 4803 | & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0)))) |
8079805d RK |
4804 | == 0)) |
4805 | { | |
4806 | c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0)); | |
4807 | extend_op = ZERO_EXTEND; | |
4808 | m = GET_MODE (XEXP (t, 0)); | |
4809 | } | |
4810 | else if (GET_CODE (t) == ZERO_EXTEND | |
4811 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4812 | || GET_CODE (XEXP (t, 0)) == IOR | |
4813 | || GET_CODE (XEXP (t, 0)) == XOR) | |
4814 | && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG | |
4815 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
4816 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 1)) | |
4817 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f) | |
4818 | && ((nonzero_bits (f, GET_MODE (f)) | |
663522cb | 4819 | & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1)))) |
8079805d RK |
4820 | == 0)) |
4821 | { | |
4822 | c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0)); | |
4823 | extend_op = ZERO_EXTEND; | |
4824 | m = GET_MODE (XEXP (t, 0)); | |
4825 | } | |
663522cb | 4826 | |
8079805d RK |
4827 | if (z) |
4828 | { | |
4829 | temp = subst (gen_binary (true_code, m, cond_op0, cond_op1), | |
4830 | pc_rtx, pc_rtx, 0, 0); | |
4831 | temp = gen_binary (MULT, m, temp, | |
4832 | gen_binary (MULT, m, c1, const_true_rtx)); | |
4833 | temp = subst (temp, pc_rtx, pc_rtx, 0, 0); | |
4834 | temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp); | |
4835 | ||
4836 | if (extend_op != NIL) | |
0c1c8ea6 | 4837 | temp = gen_unary (extend_op, mode, m, temp); |
8079805d RK |
4838 | |
4839 | return temp; | |
4840 | } | |
4841 | } | |
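/* Worked instance (illustrative, with STORE_FLAG_VALUE == 1):
   (if_then_else (ne A B) (plus Z (const_int 4)) Z) can become
   (plus Z (mult (ne A B) (const_int 4))), since the multiplier is 4
   when the condition holds and 0 otherwise.  */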
224eeff2 | 4842 | |
8079805d RK |
4843 | /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or |
4844 | 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the | |
4845 | negation of a single bit, we can convert this operation to a shift. We | |
4846 | can actually do this more generally, but it doesn't seem worth it. */ | |
4847 | ||
4848 | if (true_code == NE && XEXP (cond, 1) == const0_rtx | |
4849 | && false == const0_rtx && GET_CODE (true) == CONST_INT | |
4850 | && ((1 == nonzero_bits (XEXP (cond, 0), mode) | |
4851 | && (i = exact_log2 (INTVAL (true))) >= 0) | |
4852 | || ((num_sign_bit_copies (XEXP (cond, 0), mode) | |
4853 | == GET_MODE_BITSIZE (mode)) | |
663522cb | 4854 | && (i = exact_log2 (-INTVAL (true))) >= 0))) |
8079805d RK |
4855 | return |
4856 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
4857 | gen_lowpart_for_combine (mode, XEXP (cond, 0)), i); | |
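/* E.g., if only the low bit of X can be nonzero,
   (if_then_else (ne X 0) (const_int 8) (const_int 0)) is simply
   (ashift X 3).  */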
230d793d | 4858 | |
8079805d RK |
4859 | return x; |
4860 | } | |
4861 | \f | |
4862 | /* Simplify X, a SET expression. Return the new expression. */ | |
230d793d | 4863 | |
8079805d RK |
4864 | static rtx |
4865 | simplify_set (x) | |
4866 | rtx x; | |
4867 | { | |
4868 | rtx src = SET_SRC (x); | |
4869 | rtx dest = SET_DEST (x); | |
4870 | enum machine_mode mode | |
4871 | = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest); | |
4872 | rtx other_insn; | |
4873 | rtx *cc_use; | |
4874 | ||
4875 | /* (set (pc) (return)) gets written as (return). */ | |
4876 | if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN) | |
4877 | return src; | |
230d793d | 4878 | |
87e3e0c1 RK |
4879 | /* Now that we know for sure which bits of SRC we are using, see if we can |
4880 | simplify the expression for the object knowing that we only need the | |
4881 | low-order bits. */ | |
4882 | ||
4883 | if (GET_MODE_CLASS (mode) == MODE_INT) | |
c5c76735 | 4884 | { |
e8dc6d50 | 4885 | src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0); |
c5c76735 JL |
4886 | SUBST (SET_SRC (x), src); |
4887 | } | |
87e3e0c1 | 4888 | |
8079805d RK |
4889 | /* If we are setting CC0 or if the source is a COMPARE, look for the use of |
4890 | the comparison result and try to simplify it unless we already have used | |
4891 | undobuf.other_insn. */ | |
4892 | if ((GET_CODE (src) == COMPARE | |
230d793d | 4893 | #ifdef HAVE_cc0 |
8079805d | 4894 | || dest == cc0_rtx |
230d793d | 4895 | #endif |
8079805d RK |
4896 | ) |
4897 | && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0 | |
4898 | && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn) | |
4899 | && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<' | |
c0d3ac4d | 4900 | && rtx_equal_p (XEXP (*cc_use, 0), dest)) |
8079805d RK |
4901 | { |
4902 | enum rtx_code old_code = GET_CODE (*cc_use); | |
4903 | enum rtx_code new_code; | |
4904 | rtx op0, op1; | |
4905 | int other_changed = 0; | |
4906 | enum machine_mode compare_mode = GET_MODE (dest); | |
4907 | ||
4908 | if (GET_CODE (src) == COMPARE) | |
4909 | op0 = XEXP (src, 0), op1 = XEXP (src, 1); | |
4910 | else | |
4911 | op0 = src, op1 = const0_rtx; | |
230d793d | 4912 | |
8079805d RK |
4913 | /* Simplify our comparison, if possible. */ |
4914 | new_code = simplify_comparison (old_code, &op0, &op1); | |
230d793d | 4915 | |
c141a106 | 4916 | #ifdef EXTRA_CC_MODES |
8079805d RK |
4917 | /* If this machine has CC modes other than CCmode, check to see if we |
4918 | need to use a different CC mode here. */ | |
4919 | compare_mode = SELECT_CC_MODE (new_code, op0, op1); | |
c141a106 | 4920 | #endif /* EXTRA_CC_MODES */ |
230d793d | 4921 | |
c141a106 | 4922 | #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES) |
8079805d RK |
4923 | /* If the mode changed, we have to change SET_DEST, the mode in the |
4924 | compare, and the mode in the place SET_DEST is used. If SET_DEST is | |
4925 | a hard register, just build new versions with the proper mode. If it | |
4926 | is a pseudo, we lose unless it is the only time we set the pseudo, in | |
4927 | which case we can safely change its mode. */ | |
4928 | if (compare_mode != GET_MODE (dest)) | |
4929 | { | |
770ae6cc | 4930 | unsigned int regno = REGNO (dest); |
38a448ca | 4931 | rtx new_dest = gen_rtx_REG (compare_mode, regno); |
8079805d RK |
4932 | |
4933 | if (regno < FIRST_PSEUDO_REGISTER | |
b1f21e0a | 4934 | || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest))) |
230d793d | 4935 | { |
8079805d RK |
4936 | if (regno >= FIRST_PSEUDO_REGISTER) |
4937 | SUBST (regno_reg_rtx[regno], new_dest); | |
230d793d | 4938 | |
8079805d RK |
4939 | SUBST (SET_DEST (x), new_dest); |
4940 | SUBST (XEXP (*cc_use, 0), new_dest); | |
4941 | other_changed = 1; | |
230d793d | 4942 | |
8079805d | 4943 | dest = new_dest; |
230d793d | 4944 | } |
8079805d | 4945 | } |
230d793d RS |
4946 | #endif |
4947 | ||
8079805d RK |
4948 | /* If the code changed, we have to build a new comparison in |
4949 | undobuf.other_insn. */ | |
4950 | if (new_code != old_code) | |
4951 | { | |
4952 | unsigned HOST_WIDE_INT mask; | |
4953 | ||
4954 | SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use), | |
4955 | dest, const0_rtx)); | |
4956 | ||
4957 | /* If the only change we made was to change an EQ into an NE or | |
4958 | vice versa, OP0 has only one bit that might be nonzero, and OP1 | |
4959 | is zero, check if changing the user of the condition code will | |
4960 | produce a valid insn. If it won't, we can keep the original code | |
4961 | in that insn by surrounding our operation with an XOR. */ | |
4962 | ||
4963 | if (((old_code == NE && new_code == EQ) | |
4964 | || (old_code == EQ && new_code == NE)) | |
4965 | && ! other_changed && op1 == const0_rtx | |
4966 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT | |
4967 | && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0) | |
230d793d | 4968 | { |
8079805d | 4969 | rtx pat = PATTERN (other_insn), note = 0; |
230d793d | 4970 | |
8e2f6e35 | 4971 | if ((recog_for_combine (&pat, other_insn, ¬e) < 0 |
8079805d RK |
4972 | && ! check_asm_operands (pat))) |
4973 | { | |
4974 | PUT_CODE (*cc_use, old_code); | |
4975 | other_insn = 0; | |
230d793d | 4976 | |
8079805d | 4977 | op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask)); |
230d793d | 4978 | } |
230d793d RS |
4979 | } |
4980 | ||
8079805d RK |
4981 | other_changed = 1; |
4982 | } | |
4983 | ||
4984 | if (other_changed) | |
4985 | undobuf.other_insn = other_insn; | |
230d793d RS |
4986 | |
4987 | #ifdef HAVE_cc0 | |
8079805d RK |
4988 | /* If we are now comparing against zero, change our source if |
4989 | needed. If we do not use cc0, we always have a COMPARE. */ | |
4990 | if (op1 == const0_rtx && dest == cc0_rtx) | |
4991 | { | |
4992 | SUBST (SET_SRC (x), op0); | |
4993 | src = op0; | |
4994 | } | |
4995 | else | |
230d793d RS |
4996 | #endif |
4997 | ||
8079805d RK |
4998 | /* Otherwise, if we didn't previously have a COMPARE in the |
4999 | correct mode, we need one. */ | |
5000 | if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode) | |
5001 | { | |
5002 | SUBST (SET_SRC (x), | |
5003 | gen_rtx_combine (COMPARE, compare_mode, op0, op1)); | |
5004 | src = SET_SRC (x); | |
230d793d RS |
5005 | } |
5006 | else | |
5007 | { | |
8079805d RK |
5008 | /* Otherwise, update the COMPARE if needed. */ |
5009 | SUBST (XEXP (src, 0), op0); | |
5010 | SUBST (XEXP (src, 1), op1); | |
230d793d | 5011 | } |
8079805d RK |
5012 | } |
5013 | else | |
5014 | { | |
5015 | /* Get SET_SRC in a form where we have placed back any | |
5016 | compound expressions. Then do the checks below. */ | |
5017 | src = make_compound_operation (src, SET); | |
5018 | SUBST (SET_SRC (x), src); | |
5019 | } | |
230d793d | 5020 | |
8079805d RK |
5021 | /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation, |
5022 | and X being a REG or (subreg (reg)), we may be able to convert this to | |
663522cb | 5023 | (set (subreg:m2 x) (op)). |
df62f951 | 5024 | |
8079805d RK |
5025 | We can always do this if M1 is narrower than M2 because that means that |
5026 | we only care about the low bits of the result. | |
df62f951 | 5027 | |
8079805d | 5028 | However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot |
9ec36da5 | 5029 | perform a narrower operation than requested since the high-order bits will |
8079805d RK |
5030 | be undefined. On machines where it is defined, this transformation is safe | |
5031 | as long as M1 and M2 have the same number of words. */ | |
663522cb | 5032 | |
8079805d RK |
5033 | if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src) |
5034 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o' | |
5035 | && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1)) | |
5036 | / UNITS_PER_WORD) | |
5037 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))) | |
5038 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)) | |
8baf60bb | 5039 | #ifndef WORD_REGISTER_OPERATIONS |
8079805d RK |
5040 | && (GET_MODE_SIZE (GET_MODE (src)) |
5041 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))) | |
df62f951 | 5042 | #endif |
02188693 | 5043 | #ifdef CLASS_CANNOT_CHANGE_MODE |
f507a070 RK |
5044 | && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER |
5045 | && (TEST_HARD_REG_BIT | |
02188693 | 5046 | (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE], |
f507a070 | 5047 | REGNO (dest))) |
02188693 RH |
5048 | && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (src), |
5049 | GET_MODE (SUBREG_REG (src)))) | |
663522cb | 5050 | #endif |
8079805d RK |
5051 | && (GET_CODE (dest) == REG |
5052 | || (GET_CODE (dest) == SUBREG | |
5053 | && GET_CODE (SUBREG_REG (dest)) == REG))) | |
5054 | { | |
5055 | SUBST (SET_DEST (x), | |
5056 | gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)), | |
5057 | dest)); | |
5058 | SUBST (SET_SRC (x), SUBREG_REG (src)); | |
5059 | ||
5060 | src = SET_SRC (x), dest = SET_DEST (x); | |
5061 | } | |
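As a plain-C sketch of why this transformation is safe when M1 is narrower than M2 (illustrative only; the function name is made up): the low-order bits of an operation done in a wide mode match the same operation done in the narrow mode, so only the low bits of the result matter.

#include <assert.h>
#include <stdint.h>

/* Illustration: (subreg:HI (plus:SI a b) 0) equals a HImode add of
   the low parts, because truncation commutes with addition.  */
static uint16_t narrow_view_of_wide_add (uint32_t a, uint32_t b)
{
  return (uint16_t) (a + b);
}

int main (void)
{
  uint32_t a = 0x12345678u, b = 0xdeadbeefu;
  assert (narrow_view_of_wide_add (a, b)
          == (uint16_t) ((uint16_t) a + (uint16_t) b));
  return 0;
}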
df62f951 | 5062 | |
8baf60bb | 5063 | #ifdef LOAD_EXTEND_OP |
8079805d RK |
5064 | /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this |
5065 | would require a paradoxical subreg. Replace the subreg with a | |
0f41302f | 5066 | zero_extend to avoid the reload that would otherwise be required. */ |
8079805d RK |
5067 | |
5068 | if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src) | |
5069 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL | |
5070 | && SUBREG_WORD (src) == 0 | |
5071 | && (GET_MODE_SIZE (GET_MODE (src)) | |
5072 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))) | |
5073 | && GET_CODE (SUBREG_REG (src)) == MEM) | |
5074 | { | |
5075 | SUBST (SET_SRC (x), | |
5076 | gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))), | |
5077 | GET_MODE (src), XEXP (src, 0))); | |
5078 | ||
5079 | src = SET_SRC (x); | |
5080 | } | |
230d793d RS |
5081 | #endif |
5082 | ||
8079805d RK |
5083 | /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we |
5084 | are comparing an item known to be 0 or -1 against 0, use a logical | |
5085 | operation instead. Check for one of the arms being an IOR of the other | |
5086 | arm with some value. We compute three terms to be IOR'ed together. In | |
5087 | practice, at most two will be nonzero. Then we do the IOR's. */ | |
5088 | ||
5089 | if (GET_CODE (dest) != PC | |
5090 | && GET_CODE (src) == IF_THEN_ELSE | |
36b8d792 | 5091 | && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT |
8079805d RK |
5092 | && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE) |
5093 | && XEXP (XEXP (src, 0), 1) == const0_rtx | |
6dd49058 | 5094 | && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0)) |
ea414472 DE |
5095 | #ifdef HAVE_conditional_move |
5096 | && ! can_conditionally_move_p (GET_MODE (src)) | |
5097 | #endif | |
8079805d RK |
5098 | && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0), |
5099 | GET_MODE (XEXP (XEXP (src, 0), 0))) | |
5100 | == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0)))) | |
5101 | && ! side_effects_p (src)) | |
5102 | { | |
5103 | rtx true = (GET_CODE (XEXP (src, 0)) == NE | |
5104 | ? XEXP (src, 1) : XEXP (src, 2)); | |
5105 | rtx false = (GET_CODE (XEXP (src, 0)) == NE | |
5106 | ? XEXP (src, 2) : XEXP (src, 1)); | |
5107 | rtx term1 = const0_rtx, term2, term3; | |
5108 | ||
5109 | if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false)) | |
5110 | term1 = false, true = XEXP (true, 1), false = const0_rtx; | |
5111 | else if (GET_CODE (true) == IOR | |
5112 | && rtx_equal_p (XEXP (true, 1), false)) | |
5113 | term1 = false, true = XEXP (true, 0), false = const0_rtx; | |
5114 | else if (GET_CODE (false) == IOR | |
5115 | && rtx_equal_p (XEXP (false, 0), true)) | |
5116 | term1 = true, false = XEXP (false, 1), true = const0_rtx; | |
5117 | else if (GET_CODE (false) == IOR | |
5118 | && rtx_equal_p (XEXP (false, 1), true)) | |
5119 | term1 = true, false = XEXP (false, 0), true = const0_rtx; | |
5120 | ||
5121 | term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true); | |
5122 | term3 = gen_binary (AND, GET_MODE (src), | |
0c1c8ea6 | 5123 | gen_unary (NOT, GET_MODE (src), GET_MODE (src), |
8079805d RK |
5124 | XEXP (XEXP (src, 0), 0)), |
5125 | false); | |
5126 | ||
5127 | SUBST (SET_SRC (x), | |
5128 | gen_binary (IOR, GET_MODE (src), | |
5129 | gen_binary (IOR, GET_MODE (src), term1, term2), | |
5130 | term3)); | |
5131 | ||
5132 | src = SET_SRC (x); | |
5133 | } | |
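The rewrite above rests on the mask-select identity; a minimal sketch in plain C, assuming the mask M is known to be 0 or -1 (the function name is hypothetical, not combine's API):

#include <assert.h>
#include <stdint.h>

/* Branch-free IF_THEN_ELSE: (M & T) | (~M & F) picks T when M is
   all ones and F when M is zero.  Illustration only.  */
static int32_t mask_select (int32_t m, int32_t t, int32_t f)
{
  return (m & t) | (~m & f);
}

int main (void)
{
  assert (mask_select (-1, 7, 9) == 7);
  assert (mask_select (0, 7, 9) == 9);
  return 0;
}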
230d793d | 5134 | |
c5c76735 JL |
5135 | #ifdef HAVE_conditional_arithmetic |
5136 | /* If we have conditional arithmetic and the operand of a SET is | |
5137 | a conditional expression, replace this with an IF_THEN_ELSE. | |
5138 | We can either have a conditional expression or a MULT of that expression | |
5139 | with a constant. */ | |
5140 | if ((GET_RTX_CLASS (GET_CODE (src)) == '1' | |
5141 | || GET_RTX_CLASS (GET_CODE (src)) == '2' | |
5142 | || GET_RTX_CLASS (GET_CODE (src)) == 'c') | |
5143 | && (GET_RTX_CLASS (GET_CODE (XEXP (src, 0))) == '<' | |
5144 | || (GET_CODE (XEXP (src, 0)) == MULT | |
5145 | && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (src, 0), 0))) == '<' | |
5146 | && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT))) | |
5147 | { | |
5148 | rtx cond = XEXP (src, 0); | |
5149 | rtx true_val = const1_rtx; | |
5150 | rtx false_arm, true_arm; | |
5151 | ||
5152 | if (GET_CODE (cond) == MULT) | |
5153 | { | |
5154 | true_val = XEXP (cond, 1); | |
5155 | cond = XEXP (cond, 0); | |
5156 | } | |
5157 | ||
5158 | if (GET_RTX_CLASS (GET_CODE (src)) == '1') | |
5159 | { | |
5160 | true_arm = gen_unary (GET_CODE (src), GET_MODE (src), | |
5161 | GET_MODE (XEXP (src, 0)), true_val); | |
5162 | false_arm = gen_unary (GET_CODE (src), GET_MODE (src), | |
5163 | GET_MODE (XEXP (src, 0)), const0_rtx); | |
5164 | } | |
5165 | else | |
5166 | { | |
5167 | true_arm = gen_binary (GET_CODE (src), GET_MODE (src), | |
5168 | true_val, XEXP (src, 1)); | |
5169 | false_arm = gen_binary (GET_CODE (src), GET_MODE (src), | |
5170 | const0_rtx, XEXP (src, 1)); | |
5171 | } | |
5172 | ||
5173 | /* Canonicalize if true_arm is the simpler one. */ | |
5174 | if (GET_RTX_CLASS (GET_CODE (true_arm)) == 'o' | |
5175 | && GET_RTX_CLASS (GET_CODE (false_arm)) != 'o' | |
5176 | && reversible_comparison_p (cond)) | |
5177 | { | |
5178 | rtx temp = true_arm; | |
5179 | ||
5180 | true_arm = false_arm; | |
5181 | false_arm = temp; | |
5182 | ||
5183 | cond = gen_rtx_combine (reverse_condition (GET_CODE (cond)), | |
5184 | GET_MODE (cond), XEXP (cond, 0), | |
5185 | XEXP (cond, 1)); | |
5186 | } | |
5187 | ||
5188 | src = gen_rtx_combine (IF_THEN_ELSE, GET_MODE (src), | |
5189 | gen_rtx_combine (GET_CODE (cond), VOIDmode, | |
5190 | XEXP (cond, 0), | |
5191 | XEXP (cond, 1)), | |
5192 | true_arm, false_arm); | |
5193 | SUBST (SET_SRC (x), src); | |
5194 | } | |
5195 | #endif | |
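In plain C terms, the rewrite above distributes an operation over a condition whose value is 0 or K; a short sketch under that assumption (the helper name is hypothetical):

#include <assert.h>
#include <stdint.h>

/* (plus (mult (lt a b) 4) y) becomes an if_then_else whose arms do
   the PLUS with 4 and with 0 respectively.  Illustration only.  */
static int32_t distributed_plus (int32_t a, int32_t b, int32_t y)
{
  return (a < b) ? (4 + y) : (0 + y);
}

int main (void)
{
  int32_t a, b;
  for (a = -2; a <= 2; a++)
    for (b = -2; b <= 2; b++)
      assert (distributed_plus (a, b, 10) == (a < b) * 4 + 10);
  return 0;
}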
5196 | ||
246e00f2 RK |
5197 | /* If either SRC or DEST is a CLOBBER of (const_int 0), make this |
5198 | whole thing fail. */ | |
5199 | if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx) | |
5200 | return src; | |
5201 | else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx) | |
5202 | return dest; | |
5203 | else | |
5204 | /* Convert this into a field assignment operation, if possible. */ | |
5205 | return make_field_assignment (x); | |
8079805d RK |
5206 | } |
5207 | \f | |
5208 | /* Simplify X, an AND, IOR, or XOR operation, and return the simplified |
5209 | result. LAST is nonzero if this is the last retry. */ | |
5210 | ||
5211 | static rtx | |
5212 | simplify_logical (x, last) | |
5213 | rtx x; | |
5214 | int last; | |
5215 | { | |
5216 | enum machine_mode mode = GET_MODE (x); | |
5217 | rtx op0 = XEXP (x, 0); | |
5218 | rtx op1 = XEXP (x, 1); | |
5219 | ||
5220 | switch (GET_CODE (x)) | |
5221 | { | |
230d793d | 5222 | case AND: |
663522cb | 5223 | /* Convert (A ^ B) & A to A & (~B) since the latter is often a single |
8079805d RK |
5224 | insn (and may simplify more). */ |
5225 | if (GET_CODE (op0) == XOR | |
5226 | && rtx_equal_p (XEXP (op0, 0), op1) | |
5227 | && ! side_effects_p (op1)) | |
0c1c8ea6 RK |
5228 | x = gen_binary (AND, mode, |
5229 | gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1); | |
8079805d RK |
5230 | |
5231 | if (GET_CODE (op0) == XOR | |
5232 | && rtx_equal_p (XEXP (op0, 1), op1) | |
5233 | && ! side_effects_p (op1)) | |
0c1c8ea6 RK |
5234 | x = gen_binary (AND, mode, |
5235 | gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1); | |
8079805d | 5236 | |
663522cb | 5237 | /* Similarly for (~(A ^ B)) & A. */ |
8079805d RK |
5238 | if (GET_CODE (op0) == NOT |
5239 | && GET_CODE (XEXP (op0, 0)) == XOR | |
5240 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1) | |
5241 | && ! side_effects_p (op1)) | |
5242 | x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1); | |
5243 | ||
5244 | if (GET_CODE (op0) == NOT | |
5245 | && GET_CODE (XEXP (op0, 0)) == XOR | |
5246 | && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1) | |
5247 | && ! side_effects_p (op1)) | |
5248 | x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1); | |
5249 | ||
2e8f9abf DM |
5250 | /* We can call simplify_and_const_int only if we don't lose |
5251 | any (sign) bits when converting INTVAL (op1) to | |
5252 | "unsigned HOST_WIDE_INT". */ | |
5253 | if (GET_CODE (op1) == CONST_INT | |
5254 | && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
5255 | || INTVAL (op1) > 0)) | |
230d793d | 5256 | { |
8079805d | 5257 | x = simplify_and_const_int (x, mode, op0, INTVAL (op1)); |
230d793d RS |
5258 | |
5259 | /* If we have (ior (and X C1) C2) and the next restart would be |
5260 | the last, simplify this by making C1 as small as possible | |
0f41302f | 5261 | and then exit. */ |
8079805d RK |
5262 | if (last |
5263 | && GET_CODE (x) == IOR && GET_CODE (op0) == AND | |
5264 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5265 | && GET_CODE (op1) == CONST_INT) | |
5266 | return gen_binary (IOR, mode, | |
5267 | gen_binary (AND, mode, XEXP (op0, 0), | |
5268 | GEN_INT (INTVAL (XEXP (op0, 1)) | |
663522cb | 5269 | & ~INTVAL (op1))), op1); |
230d793d RS |
5270 | |
5271 | if (GET_CODE (x) != AND) | |
8079805d | 5272 | return x; |
0e32506c | 5273 | |
663522cb | 5274 | if (GET_RTX_CLASS (GET_CODE (x)) == 'c' |
0e32506c RK |
5275 | || GET_RTX_CLASS (GET_CODE (x)) == '2') |
5276 | op0 = XEXP (x, 0), op1 = XEXP (x, 1); | |
230d793d RS |
5277 | } |
5278 | ||
5279 | /* Convert (A | B) & A to A. */ | |
8079805d RK |
5280 | if (GET_CODE (op0) == IOR |
5281 | && (rtx_equal_p (XEXP (op0, 0), op1) | |
5282 | || rtx_equal_p (XEXP (op0, 1), op1)) | |
5283 | && ! side_effects_p (XEXP (op0, 0)) | |
5284 | && ! side_effects_p (XEXP (op0, 1))) | |
5285 | return op1; | |
230d793d | 5286 | |
d0ab8cd3 | 5287 | /* In the following group of tests (and those in case IOR below), |
230d793d RS |
5288 | we start with some combination of logical operations and apply |
5289 | the distributive law followed by the inverse distributive law. | |
5290 | Most of the time, this results in no change. However, if some of | |
5291 | the operands are the same or inverses of each other, simplifications | |
5292 | will result. | |
5293 | ||
5294 | For example, (and (ior A B) (not B)) can occur as the result of | |
5295 | expanding a bit field assignment. When we apply the distributive | |
5296 | law to this, we get (ior (and A (not B)) (and B (not B))), |
663522cb | 5297 | which then simplifies to (and A (not B)). |
230d793d | 5298 | |
8079805d | 5299 | If we have (and (ior A B) C), apply the distributive law and then |
230d793d RS |
5300 | the inverse distributive law to see if things simplify. */ |
5301 | ||
8079805d | 5302 | if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR) |
230d793d RS |
5303 | { |
5304 | x = apply_distributive_law | |
8079805d RK |
5305 | (gen_binary (GET_CODE (op0), mode, |
5306 | gen_binary (AND, mode, XEXP (op0, 0), op1), | |
3749f4ca BS |
5307 | gen_binary (AND, mode, XEXP (op0, 1), |
5308 | copy_rtx (op1)))); | |
230d793d | 5309 | if (GET_CODE (x) != AND) |
8079805d | 5310 | return x; |
230d793d RS |
5311 | } |
5312 | ||
8079805d RK |
5313 | if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR) |
5314 | return apply_distributive_law | |
5315 | (gen_binary (GET_CODE (op1), mode, | |
5316 | gen_binary (AND, mode, XEXP (op1, 0), op0), | |
3749f4ca BS |
5317 | gen_binary (AND, mode, XEXP (op1, 1), |
5318 | copy_rtx (op0)))); | |
230d793d RS |
5319 | |
5320 | /* Similarly, taking advantage of the fact that | |
5321 | (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */ | |
5322 | ||
8079805d RK |
5323 | if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR) |
5324 | return apply_distributive_law | |
5325 | (gen_binary (XOR, mode, | |
5326 | gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)), | |
3749f4ca BS |
5327 | gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)), |
5328 | XEXP (op1, 1)))); | |
663522cb | 5329 | |
8079805d RK |
5330 | else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR) |
5331 | return apply_distributive_law | |
5332 | (gen_binary (XOR, mode, | |
5333 | gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)), | |
3749f4ca | 5334 | gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1)))); |
230d793d RS |
5335 | break; |
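A brute-force check of the identity the comment above cites, (and (not A) (xor B C)) == (xor (ior A B) (ior A C)), as a stand-alone C program (illustrative only):

#include <assert.h>
#include <stdint.h>

int main (void)
{
  unsigned a, b, c;
  /* Exhaustive over 8-bit values; the identity is bitwise, so this
     covers every per-bit case many times over.  */
  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      for (c = 0; c < 256; c++)
        assert ((uint8_t) (~a & (b ^ c)) == (uint8_t) ((a | b) ^ (a | c)));
  return 0;
}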
5336 | ||
5337 | case IOR: | |
951553af | 5338 | /* (ior A C) is C if all bits of A that might be nonzero are on in C. */ |
8079805d | 5339 | if (GET_CODE (op1) == CONST_INT |
ac49a949 | 5340 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
663522cb | 5341 | && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0) |
8079805d | 5342 | return op1; |
d0ab8cd3 | 5343 | |
230d793d | 5344 | /* Convert (A & B) | A to A. */ |
8079805d RK |
5345 | if (GET_CODE (op0) == AND |
5346 | && (rtx_equal_p (XEXP (op0, 0), op1) | |
5347 | || rtx_equal_p (XEXP (op0, 1), op1)) | |
5348 | && ! side_effects_p (XEXP (op0, 0)) | |
5349 | && ! side_effects_p (XEXP (op0, 1))) | |
5350 | return op1; | |
230d793d RS |
5351 | |
5352 | /* If we have (ior (and A B) C), apply the distributive law and then | |
5353 | the inverse distributive law to see if things simplify. */ | |
5354 | ||
8079805d | 5355 | if (GET_CODE (op0) == AND) |
230d793d RS |
5356 | { |
5357 | x = apply_distributive_law | |
5358 | (gen_binary (AND, mode, | |
8079805d | 5359 | gen_binary (IOR, mode, XEXP (op0, 0), op1), |
3749f4ca BS |
5360 | gen_binary (IOR, mode, XEXP (op0, 1), |
5361 | copy_rtx (op1)))); | |
230d793d RS |
5362 | |
5363 | if (GET_CODE (x) != IOR) | |
8079805d | 5364 | return x; |
230d793d RS |
5365 | } |
5366 | ||
8079805d | 5367 | if (GET_CODE (op1) == AND) |
230d793d RS |
5368 | { |
5369 | x = apply_distributive_law | |
5370 | (gen_binary (AND, mode, | |
8079805d | 5371 | gen_binary (IOR, mode, XEXP (op1, 0), op0), |
3749f4ca BS |
5372 | gen_binary (IOR, mode, XEXP (op1, 1), |
5373 | copy_rtx (op0)))); | |
230d793d RS |
5374 | |
5375 | if (GET_CODE (x) != IOR) | |
8079805d | 5376 | return x; |
230d793d RS |
5377 | } |
5378 | ||
5379 | /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the | |
5380 | mode size to (rotate A CX). */ | |
5381 | ||
8079805d RK |
5382 | if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT) |
5383 | || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT)) | |
5384 | && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0)) | |
5385 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5386 | && GET_CODE (XEXP (op1, 1)) == CONST_INT | |
5387 | && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1)) | |
230d793d | 5388 | == GET_MODE_BITSIZE (mode))) |
38a448ca RH |
5389 | return gen_rtx_ROTATE (mode, XEXP (op0, 0), |
5390 | (GET_CODE (op0) == ASHIFT | |
5391 | ? XEXP (op0, 1) : XEXP (op1, 1))); | |
230d793d | 5392 | |
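The rotate recognition just above corresponds to the familiar C idiom; a minimal sketch assuming a 32-bit mode and 0 < CX < 32 (illustrative, not combine's code):

#include <assert.h>
#include <stdint.h>

/* (ior (ashift A CX) (lshiftrt A CY)) with CX + CY == 32 is
   (rotate A CX).  */
static uint32_t rotl32 (uint32_t a, unsigned cx)
{
  return (a << cx) | (a >> (32 - cx));
}

int main (void)
{
  assert (rotl32 (0x80000001u, 1) == 0x00000003u);
  assert (rotl32 (0x12345678u, 8) == 0x34567812u);
  return 0;
}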
71923da7 RK |
5393 | /* If OP0 is (ashiftrt (plus ...) C), it might actually be |
5394 | a (sign_extend (plus ...)). If so, and if OP1 is a CONST_INT whose bits |
5395 | are not affected by the PLUS, it can really be done |
5396 | as a PLUS and we can associate. We do this by seeing if OP1 | |
5397 | can be safely shifted left C bits. */ | |
5398 | if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT | |
5399 | && GET_CODE (XEXP (op0, 0)) == PLUS | |
5400 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
5401 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5402 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT) | |
5403 | { | |
5404 | int count = INTVAL (XEXP (op0, 1)); | |
5405 | HOST_WIDE_INT mask = INTVAL (op1) << count; | |
5406 | ||
5407 | if (mask >> count == INTVAL (op1) | |
5408 | && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0) | |
5409 | { | |
5410 | SUBST (XEXP (XEXP (op0, 0), 1), | |
5411 | GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask)); | |
5412 | return op0; | |
5413 | } | |
5414 | } | |
230d793d RS |
5415 | break; |
5416 | ||
5417 | case XOR: | |
79e8185c JH |
5418 | /* If we are XORing two things that have no bits in common, |
5419 | convert them into an IOR. This helps to detect rotation encoded | |
5420 | using those methods and possibly other simplifications. */ | |
5421 | ||
5422 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
5423 | && (nonzero_bits (op0, mode) | |
5424 | & nonzero_bits (op1, mode)) == 0) | |
5425 | return (gen_binary (IOR, mode, op0, op1)); | |
5426 | ||
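Why this is valid, as a short C check (illustrative only): XOR differs from IOR only on bits that are set in both operands, so the two agree whenever the operands share no nonzero bits.

#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t hi = 0xff000000u, lo = 0x000000ffu;
  /* No bits in common, so XOR and IOR coincide.  */
  assert ((hi & lo) == 0);
  assert ((hi ^ lo) == (hi | lo));
  return 0;
}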
230d793d RS |
5427 | /* Convert (XOR (NOT x) (NOT y)) to (XOR x y). |
5428 | Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for | |
5429 | (NOT y). */ | |
5430 | { | |
5431 | int num_negated = 0; | |
230d793d | 5432 | |
8079805d RK |
5433 | if (GET_CODE (op0) == NOT) |
5434 | num_negated++, op0 = XEXP (op0, 0); | |
5435 | if (GET_CODE (op1) == NOT) | |
5436 | num_negated++, op1 = XEXP (op1, 0); | |
230d793d RS |
5437 | |
5438 | if (num_negated == 2) | |
5439 | { | |
8079805d RK |
5440 | SUBST (XEXP (x, 0), op0); |
5441 | SUBST (XEXP (x, 1), op1); | |
230d793d RS |
5442 | } |
5443 | else if (num_negated == 1) | |
0c1c8ea6 | 5444 | return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1)); |
230d793d RS |
5445 | } |
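The two NOT/XOR identities used here, checked exhaustively over 8-bit values in a stand-alone C program (illustrative only): ~A ^ ~B == A ^ B, and ~A ^ B == ~(A ^ B).

#include <assert.h>
#include <stdint.h>

int main (void)
{
  unsigned a, b;
  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      {
        assert ((uint8_t) (~a ^ ~b) == (uint8_t) (a ^ b));
        assert ((uint8_t) (~a ^ b) == (uint8_t) ~(a ^ b));
      }
  return 0;
}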
5446 | ||
5447 | /* Convert (xor (and A B) B) to (and (not A) B). The latter may | |
5448 | correspond to a machine insn or result in further simplifications | |
5449 | if B is a constant. */ | |
5450 | ||
8079805d RK |
5451 | if (GET_CODE (op0) == AND |
5452 | && rtx_equal_p (XEXP (op0, 1), op1) | |
5453 | && ! side_effects_p (op1)) | |
0c1c8ea6 RK |
5454 | return gen_binary (AND, mode, |
5455 | gen_unary (NOT, mode, mode, XEXP (op0, 0)), | |
8079805d | 5456 | op1); |
230d793d | 5457 | |
8079805d RK |
5458 | else if (GET_CODE (op0) == AND |
5459 | && rtx_equal_p (XEXP (op0, 0), op1) | |
5460 | && ! side_effects_p (op1)) | |
0c1c8ea6 RK |
5461 | return gen_binary (AND, mode, |
5462 | gen_unary (NOT, mode, mode, XEXP (op0, 1)), | |
8079805d | 5463 | op1); |
230d793d | 5464 | |
230d793d | 5465 | /* (xor (comparison foo bar) (const_int 1)) can become the reversed |
0802d516 RK |
5466 | comparison if STORE_FLAG_VALUE is 1. */ |
5467 | if (STORE_FLAG_VALUE == 1 | |
5468 | && op1 == const1_rtx | |
8079805d RK |
5469 | && GET_RTX_CLASS (GET_CODE (op0)) == '<' |
5470 | && reversible_comparison_p (op0)) | |
5471 | return gen_rtx_combine (reverse_condition (GET_CODE (op0)), | |
5472 | mode, XEXP (op0, 0), XEXP (op0, 1)); | |
500c518b RK |
5473 | |
5474 | /* (lshiftrt foo C) where C is the number of bits in FOO minus 1 | |
5475 | is (lt foo (const_int 0)), so we can perform the above | |
0802d516 | 5476 | simplification if STORE_FLAG_VALUE is 1. */ |
500c518b | 5477 | |
0802d516 RK |
5478 | if (STORE_FLAG_VALUE == 1 |
5479 | && op1 == const1_rtx | |
8079805d RK |
5480 | && GET_CODE (op0) == LSHIFTRT |
5481 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5482 | && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1) | |
5483 | return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx); | |
230d793d RS |
5484 | |
5485 | /* (xor (comparison foo bar) (const_int sign-bit)) | |
5486 | when STORE_FLAG_VALUE is the sign bit. */ | |
5f4f0e22 | 5487 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
0802d516 | 5488 | && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode)) |
e51712db | 5489 | == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1)) |
8079805d RK |
5490 | && op1 == const_true_rtx |
5491 | && GET_RTX_CLASS (GET_CODE (op0)) == '<' | |
5492 | && reversible_comparison_p (op0)) | |
5493 | return gen_rtx_combine (reverse_condition (GET_CODE (op0)), | |
5494 | mode, XEXP (op0, 0), XEXP (op0, 1)); | |
0918eca0 | 5495 | |
230d793d | 5496 | break; |
e9a25f70 JL |
5497 | |
5498 | default: | |
5499 | abort (); | |
230d793d RS |
5500 | } |
5501 | ||
5502 | return x; | |
5503 | } | |
5504 | \f | |
5505 | /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound | |
5506 | operations" because they can be replaced with two more basic operations. | |
5507 | ZERO_EXTEND is also considered "compound" because it can be replaced with | |
5508 | an AND operation, which is simpler, though only one operation. | |
5509 | ||
5510 | The function expand_compound_operation is called with an rtx expression | |
663522cb | 5511 | and will convert it to the appropriate shifts and AND operations, |
230d793d RS |
5512 | simplifying at each stage. |
5513 | ||
5514 | The function make_compound_operation is called to convert an expression | |
5515 | consisting of shifts and ANDs into the equivalent compound expression. | |
5516 | It is the inverse of this function, loosely speaking. */ | |
5517 | ||
5518 | static rtx | |
5519 | expand_compound_operation (x) | |
5520 | rtx x; | |
5521 | { | |
770ae6cc | 5522 | unsigned HOST_WIDE_INT pos = 0, len; |
230d793d | 5523 | int unsignedp = 0; |
770ae6cc | 5524 | unsigned int modewidth; |
230d793d RS |
5525 | rtx tem; |
5526 | ||
5527 | switch (GET_CODE (x)) | |
5528 | { | |
5529 | case ZERO_EXTEND: | |
5530 | unsignedp = 1; | |
5531 | case SIGN_EXTEND: | |
75473182 RS |
5532 | /* We can't necessarily use a const_int for a multiword mode; |
5533 | it depends on implicitly extending the value. | |
5534 | Since we don't know the right way to extend it, | |
5535 | we can't tell whether the implicit way is right. | |
5536 | ||
5537 | Even for a mode that is no wider than a const_int, | |
5538 | we can't win, because we need to sign extend one of its bits through | |
5539 | the rest of it, and we don't know which bit. */ | |
230d793d | 5540 | if (GET_CODE (XEXP (x, 0)) == CONST_INT) |
75473182 | 5541 | return x; |
230d793d | 5542 | |
8079805d RK |
5543 | /* Return if (subreg:MODE FROM 0) is not a safe replacement for |
5544 | (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is safe for any MEM |
5545 | because (SUBREG (MEM...)) is guaranteed to cause the MEM to be | |
5546 | reloaded. If not for that, MEM's would very rarely be safe. | |
5547 | ||
5548 | Reject MODEs bigger than a word, because we might not be able | |
5549 | to reference a two-register group starting with an arbitrary register | |
5550 | (and currently gen_lowpart might crash for a SUBREG). */ | |
663522cb | 5551 | |
8079805d | 5552 | if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD) |
230d793d RS |
5553 | return x; |
5554 | ||
5555 | len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))); | |
5556 | /* If the inner object has VOIDmode (the only way this can happen | |
5557 | is if it is an ASM_OPERANDS), we can't do anything since we don't |
5558 | know how much masking to do. */ | |
5559 | if (len == 0) | |
5560 | return x; | |
5561 | ||
5562 | break; | |
5563 | ||
5564 | case ZERO_EXTRACT: | |
5565 | unsignedp = 1; | |
5566 | case SIGN_EXTRACT: | |
5567 | /* If the operand is a CLOBBER, just return it. */ | |
5568 | if (GET_CODE (XEXP (x, 0)) == CLOBBER) | |
5569 | return XEXP (x, 0); | |
5570 | ||
5571 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
5572 | || GET_CODE (XEXP (x, 2)) != CONST_INT | |
5573 | || GET_MODE (XEXP (x, 0)) == VOIDmode) | |
5574 | return x; | |
5575 | ||
5576 | len = INTVAL (XEXP (x, 1)); | |
5577 | pos = INTVAL (XEXP (x, 2)); | |
5578 | ||
5579 | /* If this goes outside the object being extracted, replace the object | |
5580 | with a (use (mem ...)) construct that only combine understands | |
5581 | and is used only for this purpose. */ | |
5582 | if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))) | |
38a448ca | 5583 | SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0))); |
230d793d | 5584 | |
f76b9db2 ILT |
5585 | if (BITS_BIG_ENDIAN) |
5586 | pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos; | |
5587 | ||
230d793d RS |
5588 | break; |
5589 | ||
5590 | default: | |
5591 | return x; | |
5592 | } | |
0f808b6f JH |
5593 | /* Convert sign extension to zero extension, if we know that the high |
5594 | bit is not set, as this is easier to optimize. It will be converted | |
5595 | back to the cheaper alternative in make_extraction. */ |
5596 | if (GET_CODE (x) == SIGN_EXTEND | |
5597 | && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
5598 | && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0))) | |
663522cb | 5599 | & ~(((unsigned HOST_WIDE_INT) |
0f808b6f JH |
5600 | GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) |
5601 | >> 1)) | |
5602 | == 0))) | |
5603 | { | |
5604 | rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0)); | |
5605 | return expand_compound_operation (temp); | |
5606 | } | |
230d793d | 5607 | |
0f13a422 ILT |
5608 | /* We can optimize some special cases of ZERO_EXTEND. */ |
5609 | if (GET_CODE (x) == ZERO_EXTEND) | |
5610 | { | |
5611 | /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we | |
5612 | know that the last value didn't have any inappropriate bits | |
5613 | set. */ | |
5614 | if (GET_CODE (XEXP (x, 0)) == TRUNCATE | |
5615 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x) | |
5616 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
5617 | && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x)) | |
663522cb | 5618 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5619 | return XEXP (XEXP (x, 0), 0); |
5620 | ||
5621 | /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */ | |
5622 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
5623 | && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x) | |
5624 | && subreg_lowpart_p (XEXP (x, 0)) | |
5625 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
5626 | && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x)) | |
663522cb | 5627 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5628 | return SUBREG_REG (XEXP (x, 0)); |
5629 | ||
5630 | /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo | |
5631 | is a comparison and STORE_FLAG_VALUE permits. This is like | |
5632 | the first case, but it works even when GET_MODE (x) is larger | |
5633 | than HOST_WIDE_INT. */ | |
5634 | if (GET_CODE (XEXP (x, 0)) == TRUNCATE | |
5635 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x) | |
5636 | && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<' | |
5637 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
5638 | <= HOST_BITS_PER_WIDE_INT) | |
5639 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE | |
663522cb | 5640 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5641 | return XEXP (XEXP (x, 0), 0); |
5642 | ||
5643 | /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */ | |
5644 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
5645 | && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x) | |
5646 | && subreg_lowpart_p (XEXP (x, 0)) | |
5647 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<' | |
5648 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
5649 | <= HOST_BITS_PER_WIDE_INT) | |
5650 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE | |
663522cb | 5651 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5652 | return SUBREG_REG (XEXP (x, 0)); |
5653 | ||
0f13a422 ILT |
5654 | } |
5655 | ||
230d793d RS |
5656 | /* If we reach here, we want to return a pair of shifts. The inner |
5657 | shift is a left shift of BITSIZE - POS - LEN bits. The outer | |
5658 | shift is a right shift of BITSIZE - LEN bits. It is arithmetic or | |
5659 | logical depending on the value of UNSIGNEDP. | |
5660 | ||
5661 | If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be | |
5662 | converted into an AND of a shift. | |
5663 | ||
5664 | We must check for the case where the left shift would have a negative | |
5665 | count. This can happen in a case like (x >> 31) & 255 on machines | |
5666 | that can't shift by a constant. On those machines, we would first | |
663522cb | 5667 | combine the shift with the AND to produce a variable-position |
230d793d RS |
5668 | extraction. Then the constant of 31 would be substituted in to produce |
5669 | such a position. */ |
5670 | ||
5671 | modewidth = GET_MODE_BITSIZE (GET_MODE (x)); | |
770ae6cc | 5672 | if (modewidth + len >= pos) |
5f4f0e22 | 5673 | tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT, |
230d793d | 5674 | GET_MODE (x), |
5f4f0e22 CH |
5675 | simplify_shift_const (NULL_RTX, ASHIFT, |
5676 | GET_MODE (x), | |
230d793d RS |
5677 | XEXP (x, 0), |
5678 | modewidth - pos - len), | |
5679 | modewidth - len); | |
5680 | ||
5f4f0e22 CH |
5681 | else if (unsignedp && len < HOST_BITS_PER_WIDE_INT) |
5682 | tem = simplify_and_const_int (NULL_RTX, GET_MODE (x), | |
5683 | simplify_shift_const (NULL_RTX, LSHIFTRT, | |
230d793d RS |
5684 | GET_MODE (x), |
5685 | XEXP (x, 0), pos), | |
5f4f0e22 | 5686 | ((HOST_WIDE_INT) 1 << len) - 1); |
230d793d RS |
5687 | else |
5688 | /* We can't handle any other cases. */ |
5689 | return x; | |
230d793d RS |
5690 | |
5691 | /* If we couldn't do this for some reason, return the original | |
5692 | expression. */ | |
5693 | if (GET_CODE (tem) == CLOBBER) | |
5694 | return x; | |
5695 | ||
5696 | return tem; | |
5697 | } | |
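A plain-C sketch of the shift pair this function produces, for a 32-bit mode (the function names are made up; it assumes arithmetic right shift of negative values and wrap-around conversion to int32_t, which C leaves to the implementation but RTL's ASHIFTRT guarantees):

#include <assert.h>
#include <stdint.h>

/* Extract LEN bits at bit POS (counted from the LSB): shift left by
   32 - POS - LEN, then right by 32 - LEN; the right shift is
   arithmetic for a sign extract and logical for a zero extract.
   Assumes 0 < len and pos + len <= 32.  */
static int32_t sign_extract32 (int32_t x, unsigned pos, unsigned len)
{
  return (int32_t) ((uint32_t) x << (32 - pos - len)) >> (32 - len);
}

static uint32_t zero_extract32 (uint32_t x, unsigned pos, unsigned len)
{
  return (x << (32 - pos - len)) >> (32 - len);
}

int main (void)
{
  assert (sign_extract32 (0x000000f0, 4, 4) == -1);  /* bits 4..7 are 1111 */
  assert (zero_extract32 (0x000000f0u, 4, 4) == 15);
  return 0;
}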
5698 | \f | |
5699 | /* X is a SET which contains an assignment of one object into | |
5700 | a part of another (such as a bit-field assignment, STRICT_LOW_PART, | |
5701 | or certain SUBREGS). If possible, convert it into a series of | |
5702 | logical operations. | |
5703 | ||
5704 | We half-heartedly support variable positions, but do not at all | |
5705 | support variable lengths. */ | |
5706 | ||
5707 | static rtx | |
5708 | expand_field_assignment (x) | |
5709 | rtx x; | |
5710 | { | |
5711 | rtx inner; | |
0f41302f | 5712 | rtx pos; /* Always counts from low bit. */ |
230d793d RS |
5713 | int len; |
5714 | rtx mask; | |
5715 | enum machine_mode compute_mode; | |
5716 | ||
5717 | /* Loop until we find something we can't simplify. */ | |
5718 | while (1) | |
5719 | { | |
5720 | if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART | |
5721 | && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG) | |
5722 | { | |
5723 | inner = SUBREG_REG (XEXP (SET_DEST (x), 0)); | |
5724 | len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))); | |
4d9cfc7b | 5725 | pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0))); |
230d793d RS |
5726 | } |
5727 | else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT | |
5728 | && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT) | |
5729 | { | |
5730 | inner = XEXP (SET_DEST (x), 0); | |
5731 | len = INTVAL (XEXP (SET_DEST (x), 1)); | |
5732 | pos = XEXP (SET_DEST (x), 2); | |
5733 | ||
5734 | /* If the position is constant and spans the width of INNER, | |
5735 | surround INNER with a USE to indicate this. */ | |
5736 | if (GET_CODE (pos) == CONST_INT | |
5737 | && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner))) | |
38a448ca | 5738 | inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner); |
230d793d | 5739 | |
f76b9db2 ILT |
5740 | if (BITS_BIG_ENDIAN) |
5741 | { | |
5742 | if (GET_CODE (pos) == CONST_INT) | |
5743 | pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len | |
5744 | - INTVAL (pos)); | |
5745 | else if (GET_CODE (pos) == MINUS | |
5746 | && GET_CODE (XEXP (pos, 1)) == CONST_INT | |
5747 | && (INTVAL (XEXP (pos, 1)) | |
5748 | == GET_MODE_BITSIZE (GET_MODE (inner)) - len)) | |
5749 | /* If position is ADJUST - X, new position is X. */ | |
5750 | pos = XEXP (pos, 0); | |
5751 | else | |
5752 | pos = gen_binary (MINUS, GET_MODE (pos), | |
5753 | GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) | |
5754 | - len), | |
5755 | pos); | |
5756 | } | |
230d793d RS |
5757 | } |
5758 | ||
5759 | /* A SUBREG between two modes that occupy the same numbers of words | |
5760 | can be done by moving the SUBREG to the source. */ | |
5761 | else if (GET_CODE (SET_DEST (x)) == SUBREG | |
b1e9c8a9 AO |
5762 | /* We need SUBREGs to compute nonzero_bits properly. */ |
5763 | && nonzero_sign_valid | |
230d793d RS |
5764 | && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x))) |
5765 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) | |
5766 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x)))) | |
5767 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))) | |
5768 | { | |
38a448ca | 5769 | x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)), |
c5c76735 JL |
5770 | gen_lowpart_for_combine |
5771 | (GET_MODE (SUBREG_REG (SET_DEST (x))), | |
5772 | SET_SRC (x))); | |
230d793d RS |
5773 | continue; |
5774 | } | |
5775 | else | |
5776 | break; | |
5777 | ||
5778 | while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner)) | |
5779 | inner = SUBREG_REG (inner); | |
5780 | ||
5781 | compute_mode = GET_MODE (inner); | |
5782 | ||
861556b4 RH |
5783 | /* Don't attempt bitwise arithmetic on non-integral modes. */ |
5784 | if (! INTEGRAL_MODE_P (compute_mode)) | |
5785 | { | |
5786 | enum machine_mode imode; | |
5787 | ||
5788 | /* Something is probably seriously wrong if this matches. */ | |
5789 | if (! FLOAT_MODE_P (compute_mode)) | |
5790 | break; | |
5791 | ||
5792 | /* Try to find an integral mode to pun with. */ | |
5793 | imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0); | |
5794 | if (imode == BLKmode) | |
5795 | break; | |
5796 | ||
5797 | compute_mode = imode; | |
5798 | inner = gen_lowpart_for_combine (imode, inner); | |
5799 | } | |
5800 | ||
230d793d | 5801 | /* Compute a mask of LEN bits, if we can do this on the host machine. */ |
5f4f0e22 CH |
5802 | if (len < HOST_BITS_PER_WIDE_INT) |
5803 | mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1); | |
230d793d RS |
5804 | else |
5805 | break; | |
5806 | ||
5807 | /* Now compute the equivalent expression. Make a copy of INNER | |
5808 | for the SET_DEST in case it is a MEM into which we will substitute; | |
5809 | we don't want shared RTL in that case. */ | |
c5c76735 JL |
5810 | x = gen_rtx_SET |
5811 | (VOIDmode, copy_rtx (inner), | |
5812 | gen_binary (IOR, compute_mode, | |
5813 | gen_binary (AND, compute_mode, | |
5814 | gen_unary (NOT, compute_mode, | |
5815 | compute_mode, | |
5816 | gen_binary (ASHIFT, | |
5817 | compute_mode, | |
5818 | mask, pos)), | |
5819 | inner), | |
5820 | gen_binary (ASHIFT, compute_mode, | |
5821 | gen_binary (AND, compute_mode, | |
5822 | gen_lowpart_for_combine | |
5823 | (compute_mode, SET_SRC (x)), | |
5824 | mask), | |
5825 | pos))); | |
230d793d RS |
5826 | } |
5827 | ||
5828 | return x; | |
5829 | } | |
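The IOR/AND expression built above, written out as plain C for a 32-bit word (the helper name is hypothetical): clear the destination field with the inverted shifted mask, then OR in the masked, shifted source.

#include <assert.h>
#include <stdint.h>

/* Store the low LEN bits of SRC into INNER at bit position POS:
   (inner & ~(mask << pos)) | ((src & mask) << pos).
   Assumes 0 < len < 32 and pos + len <= 32.  */
static uint32_t store_field (uint32_t inner, uint32_t src,
                             unsigned pos, unsigned len)
{
  uint32_t mask = (1u << len) - 1;
  return (inner & ~(mask << pos)) | ((src & mask) << pos);
}

int main (void)
{
  assert (store_field (0xffffffffu, 0x5u, 8, 4) == 0xfffff5ffu);
  return 0;
}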
5830 | \f | |
8999a12e RK |
5831 | /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero, |
5832 | it is an RTX that represents a variable starting position; otherwise, | |
5833 | POS is the (constant) starting bit position (counted from the LSB). | |
230d793d RS |
5834 | |
5835 | INNER may be a USE. This will occur when we started with a bitfield | |
5836 | that went outside the boundary of the object in memory, which is | |
5837 | allowed on most machines. To isolate this case, we produce a USE | |
5838 | whose mode is wide enough and surround the MEM with it. The only | |
5839 | code that understands the USE is this routine. If it is not removed, | |
5840 | it will cause the resulting insn not to match. | |
5841 | ||
663522cb | 5842 | UNSIGNEDP is non-zero for an unsigned reference and zero for a |
230d793d RS |
5843 | signed reference. |
5844 | ||
5845 | IN_DEST is non-zero if this is a reference in the destination of a | |
5846 | SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero, | |
5847 | a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will | |
5848 | be used. | |
5849 | ||
5850 | IN_COMPARE is non-zero if we are in a COMPARE. This means that a | |
5851 | ZERO_EXTRACT should be built even for bits starting at bit 0. | |
5852 | ||
76184def DE |
5853 | MODE is the desired mode of the result (if IN_DEST == 0). |
5854 | ||
5855 | The result is an RTX for the extraction or NULL_RTX if the target | |
5856 | can't handle it. */ | |
230d793d RS |
5857 | |
5858 | static rtx | |
5859 | make_extraction (mode, inner, pos, pos_rtx, len, | |
5860 | unsignedp, in_dest, in_compare) | |
5861 | enum machine_mode mode; | |
5862 | rtx inner; | |
770ae6cc | 5863 | HOST_WIDE_INT pos; |
230d793d | 5864 | rtx pos_rtx; |
770ae6cc | 5865 | unsigned HOST_WIDE_INT len; |
230d793d RS |
5866 | int unsignedp; |
5867 | int in_dest, in_compare; | |
5868 | { | |
94b4b17a RS |
5869 | /* This mode describes the size of the storage area |
5870 | to fetch the overall value from. Within that, we | |
5871 | ignore the POS lowest bits, etc. */ | |
230d793d RS |
5872 | enum machine_mode is_mode = GET_MODE (inner); |
5873 | enum machine_mode inner_mode; | |
d7cd794f RK |
5874 | enum machine_mode wanted_inner_mode = byte_mode; |
5875 | enum machine_mode wanted_inner_reg_mode = word_mode; | |
230d793d RS |
5876 | enum machine_mode pos_mode = word_mode; |
5877 | enum machine_mode extraction_mode = word_mode; | |
5878 | enum machine_mode tmode = mode_for_size (len, MODE_INT, 1); | |
5879 | int spans_byte = 0; | |
5880 | rtx new = 0; | |
8999a12e | 5881 | rtx orig_pos_rtx = pos_rtx; |
770ae6cc | 5882 | HOST_WIDE_INT orig_pos; |
230d793d RS |
5883 | |
5884 | /* Get some information about INNER and get the innermost object. */ | |
5885 | if (GET_CODE (inner) == USE) | |
94b4b17a | 5886 | /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */ |
230d793d RS |
5887 | /* We don't need to adjust the position because we set up the USE |
5888 | to pretend that it was a full-word object. */ | |
5889 | spans_byte = 1, inner = XEXP (inner, 0); | |
5890 | else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner)) | |
94b4b17a RS |
5891 | { |
5892 | /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...), | |
5893 | consider just the QI as the memory to extract from. | |
5894 | The subreg adds or removes high bits; its mode is | |
5895 | irrelevant to the meaning of this extraction, | |
5896 | since POS and LEN count from the lsb. */ | |
5897 | if (GET_CODE (SUBREG_REG (inner)) == MEM) | |
5898 | is_mode = GET_MODE (SUBREG_REG (inner)); | |
5899 | inner = SUBREG_REG (inner); | |
5900 | } | |
230d793d RS |
5901 | |
5902 | inner_mode = GET_MODE (inner); | |
5903 | ||
5904 | if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT) | |
8999a12e | 5905 | pos = INTVAL (pos_rtx), pos_rtx = 0; |
230d793d RS |
5906 | |
5907 | /* See if this can be done without an extraction. We never can if the | |
5908 | width of the field is not the same as that of some integer mode. For | |
5909 | registers, we can only avoid the extraction if the position is at the | |
5910 | low-order bit and this is either not in the destination or we have the | |
5911 | appropriate STRICT_LOW_PART operation available. | |
5912 | ||
5913 | For MEM, we can avoid an extract if the field starts on an appropriate | |
5914 | boundary and we can change the mode of the memory reference. However, | |
5915 | we cannot directly access the MEM if we have a USE and the underlying | |
5916 | MEM is not TMODE. This combination means that MEM was being used in a | |
5917 | context where bits outside its mode were being referenced; that is only | |
5918 | valid in bit-field insns. */ | |
5919 | ||
5920 | if (tmode != BLKmode | |
5921 | && ! (spans_byte && inner_mode != tmode) | |
4d9cfc7b RK |
5922 | && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0 |
5923 | && GET_CODE (inner) != MEM | |
230d793d | 5924 | && (! in_dest |
df62f951 RK |
5925 | || (GET_CODE (inner) == REG |
5926 | && (movstrict_optab->handlers[(int) tmode].insn_code | |
5927 | != CODE_FOR_nothing)))) | |
8999a12e | 5928 | || (GET_CODE (inner) == MEM && pos_rtx == 0 |
dfbe1b2f RK |
5929 | && (pos |
5930 | % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode) | |
5931 | : BITS_PER_UNIT)) == 0 | |
230d793d RS |
5932 | /* We can't do this if we are widening INNER_MODE (it |
5933 | may not be aligned, for one thing). */ | |
5934 | && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode) | |
5935 | && (inner_mode == tmode | |
5936 | || (! mode_dependent_address_p (XEXP (inner, 0)) | |
5937 | && ! MEM_VOLATILE_P (inner)))))) | |
5938 | { | |
230d793d RS |
5939 | /* If INNER is a MEM, make a new MEM that encompasses just the desired |
5940 | field. If the original and current mode are the same, we need not | |
663522cb | 5941 | adjust the offset. Otherwise, we do if bytes are big endian. |
230d793d | 5942 | |
4d9cfc7b RK |
5943 | If INNER is not a MEM, get a piece consisting of just the field |
5944 | of interest (in this case POS % BITS_PER_WORD must be 0). */ | |
230d793d RS |
5945 | |
5946 | if (GET_CODE (inner) == MEM) | |
5947 | { | |
94b4b17a RS |
5948 | int offset; |
5949 | /* POS counts from lsb, but make OFFSET count in memory order. */ | |
5950 | if (BYTES_BIG_ENDIAN) | |
5951 | offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT; | |
5952 | else | |
5953 | offset = pos / BITS_PER_UNIT; | |
230d793d | 5954 | |
38a448ca | 5955 | new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset)); |
c6df88cb | 5956 | MEM_COPY_ATTRIBUTES (new, inner); |
230d793d | 5957 | } |
df62f951 | 5958 | else if (GET_CODE (inner) == REG) |
c0d3ac4d RK |
5959 | { |
5960 | /* We can't call gen_lowpart_for_combine here since we always want | |
5961 | a SUBREG and it would sometimes return a new hard register. */ | |
5962 | if (tmode != inner_mode) | |
38a448ca RH |
5963 | new = gen_rtx_SUBREG (tmode, inner, |
5964 | (WORDS_BIG_ENDIAN | |
c5c76735 JL |
5965 | && (GET_MODE_SIZE (inner_mode) |
5966 | > UNITS_PER_WORD) | |
38a448ca RH |
5967 | ? (((GET_MODE_SIZE (inner_mode) |
5968 | - GET_MODE_SIZE (tmode)) | |
5969 | / UNITS_PER_WORD) | |
5970 | - pos / BITS_PER_WORD) | |
5971 | : pos / BITS_PER_WORD)); | |
c0d3ac4d RK |
5972 | else |
5973 | new = inner; | |
5974 | } | |
230d793d | 5975 | else |
6139ff20 RK |
5976 | new = force_to_mode (inner, tmode, |
5977 | len >= HOST_BITS_PER_WIDE_INT | |
e8dc6d50 | 5978 | ? ~(HOST_WIDE_INT) 0 |
729a2125 | 5979 | : ((unsigned HOST_WIDE_INT) 1 << len) - 1, |
e3d616e3 | 5980 | NULL_RTX, 0); |
230d793d | 5981 | |
663522cb | 5982 | /* If this extraction is going into the destination of a SET, |
230d793d RS |
5983 | make a STRICT_LOW_PART unless we made a MEM. */ |
5984 | ||
5985 | if (in_dest) | |
5986 | return (GET_CODE (new) == MEM ? new | |
77fa0940 | 5987 | : (GET_CODE (new) != SUBREG |
38a448ca | 5988 | ? gen_rtx_CLOBBER (tmode, const0_rtx) |
77fa0940 | 5989 | : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new))); |
230d793d | 5990 | |
0f808b6f JH |
5991 | if (mode == tmode) |
5992 | return new; | |
5993 | ||
5994 | /* If we know that no extraneous bits are set, and that the high | |
5995 | bit is not set, convert the extraction to the cheaper of | |
5996 | sign and zero extension, that are equivalent in these cases. */ | |
5997 | if (flag_expensive_optimizations | |
5998 | && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT | |
5999 | && ((nonzero_bits (new, tmode) | |
663522cb KH |
6000 | & ~(((unsigned HOST_WIDE_INT) |
6001 | GET_MODE_MASK (tmode)) | |
6002 | >> 1)) | |
0f808b6f JH |
6003 | == 0))) |
6004 | { | |
6005 | rtx temp = gen_rtx_ZERO_EXTEND (mode, new); | |
6006 | rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new); | |
6007 | ||
6008 | /* Prefer ZERO_EXTENSION, since it gives more information to | |
6009 | backends. */ | |
6010 | if (rtx_cost (temp, SET) < rtx_cost (temp1, SET)) | |
6011 | return temp; | |
6012 | return temp1; | |
6013 | } | |
6014 | ||
230d793d RS |
6015 | /* Otherwise, sign- or zero-extend unless we already are in the |
6016 | proper mode. */ | |
6017 | ||
0f808b6f JH |
6018 | return (gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, |
6019 | mode, new)); | |
230d793d RS |
6020 | } |
6021 | ||
cc471082 RS |
6022 | /* Unless this is a COMPARE or we have a funny memory reference, |
6023 | don't do anything with zero-extending field extracts starting at | |
6024 | the low-order bit since they are simple AND operations. */ | |
8999a12e RK |
6025 | if (pos_rtx == 0 && pos == 0 && ! in_dest |
6026 | && ! in_compare && ! spans_byte && unsignedp) | |
230d793d RS |
6027 | return 0; |
6028 | ||
c5c76735 JL |
6029 | /* Unless we are allowed to span bytes or INNER is not MEM, reject this if |
6030 | we would be spanning bytes or if the position is not a constant and the | |
6031 | length is not 1. In all other cases, we would only be going outside | |
6032 | our object in cases when an original shift would have been | |
e7373556 | 6033 | undefined. */ |
c5c76735 | 6034 | if (! spans_byte && GET_CODE (inner) == MEM |
e7373556 RK |
6035 | && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode)) |
6036 | || (pos_rtx != 0 && len != 1))) | |
6037 | return 0; | |
6038 | ||
d7cd794f | 6039 | /* Get the mode to use should INNER not be a MEM, the mode for the position, |
230d793d RS |
6040 | and the mode for the result. */ |
6041 | #ifdef HAVE_insv | |
6042 | if (in_dest) | |
6043 | { | |
0d8e55d8 | 6044 | wanted_inner_reg_mode |
a995e389 RH |
6045 | = insn_data[(int) CODE_FOR_insv].operand[0].mode; |
6046 | if (wanted_inner_reg_mode == VOIDmode) | |
6047 | wanted_inner_reg_mode = word_mode; | |
6048 | ||
6049 | pos_mode = insn_data[(int) CODE_FOR_insv].operand[2].mode; | |
6050 | if (pos_mode == VOIDmode) | |
6051 | pos_mode = word_mode; | |
6052 | ||
6053 | extraction_mode = insn_data[(int) CODE_FOR_insv].operand[3].mode; | |
6054 | if (extraction_mode == VOIDmode) | |
6055 | extraction_mode = word_mode; | |
230d793d RS |
6056 | } |
6057 | #endif | |
6058 | ||
6059 | #ifdef HAVE_extzv | |
6060 | if (! in_dest && unsignedp) | |
6061 | { | |
0d8e55d8 | 6062 | wanted_inner_reg_mode |
a995e389 RH |
6063 | = insn_data[(int) CODE_FOR_extzv].operand[1].mode; |
6064 | if (wanted_inner_reg_mode == VOIDmode) | |
6065 | wanted_inner_reg_mode = word_mode; | |
6066 | ||
6067 | pos_mode = insn_data[(int) CODE_FOR_extzv].operand[3].mode; | |
6068 | if (pos_mode == VOIDmode) | |
6069 | pos_mode = word_mode; | |
6070 | ||
6071 | extraction_mode = insn_data[(int) CODE_FOR_extzv].operand[0].mode; | |
6072 | if (extraction_mode == VOIDmode) | |
6073 | extraction_mode = word_mode; | |
230d793d RS |
6074 | } |
6075 | #endif | |
6076 | ||
6077 | #ifdef HAVE_extv | |
6078 | if (! in_dest && ! unsignedp) | |
6079 | { | |
0d8e55d8 | 6080 | wanted_inner_reg_mode |
a995e389 RH |
6081 | = insn_data[(int) CODE_FOR_extv].operand[1].mode; |
6082 | if (wanted_inner_reg_mode == VOIDmode) | |
6083 | wanted_inner_reg_mode = word_mode; | |
6084 | ||
6085 | pos_mode = insn_data[(int) CODE_FOR_extv].operand[3].mode; | |
6086 | if (pos_mode == VOIDmode) | |
6087 | pos_mode = word_mode; | |
6088 | ||
6089 | extraction_mode = insn_data[(int) CODE_FOR_extv].operand[0].mode; | |
6090 | if (extraction_mode == VOIDmode) | |
6091 | extraction_mode = word_mode; | |
230d793d RS |
6092 | } |
6093 | #endif | |
6094 | ||
6095 | /* Never narrow an object, since that might not be safe. */ | |
6096 | ||
6097 | if (mode != VOIDmode | |
6098 | && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode)) | |
6099 | extraction_mode = mode; | |
6100 | ||
6101 | if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode | |
6102 | && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx))) | |
6103 | pos_mode = GET_MODE (pos_rtx); | |
6104 | ||
d7cd794f RK |
6105 | /* If this is not from memory, the desired mode is wanted_inner_reg_mode; |
6106 | if we have to change the mode of memory and cannot, the desired mode is | |
6107 | EXTRACTION_MODE. */ | |
6108 | if (GET_CODE (inner) != MEM) | |
6109 | wanted_inner_mode = wanted_inner_reg_mode; | |
6110 | else if (inner_mode != wanted_inner_mode | |
6111 | && (mode_dependent_address_p (XEXP (inner, 0)) | |
6112 | || MEM_VOLATILE_P (inner))) | |
6113 | wanted_inner_mode = extraction_mode; | |
230d793d | 6114 | |
6139ff20 RK |
6115 | orig_pos = pos; |
6116 | ||
f76b9db2 ILT |
6117 | if (BITS_BIG_ENDIAN) |
6118 | { | |
cf54c2cd DE |
6119 | /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to |
6120 | BITS_BIG_ENDIAN style. If position is constant, compute new | |
6121 | position. Otherwise, build subtraction. | |
6122 | Note that POS is relative to the mode of the original argument. | |
6123 | If it's a MEM we need to recompute POS relative to that. | |
6124 | However, if we're extracting from (or inserting into) a register, | |
6125 | we want to recompute POS relative to wanted_inner_mode. */ | |
6126 | int width = (GET_CODE (inner) == MEM | |
6127 | ? GET_MODE_BITSIZE (is_mode) | |
6128 | : GET_MODE_BITSIZE (wanted_inner_mode)); | |
6129 | ||
f76b9db2 | 6130 | if (pos_rtx == 0) |
cf54c2cd | 6131 | pos = width - len - pos; |
f76b9db2 ILT |
6132 | else |
6133 | pos_rtx | |
6134 | = gen_rtx_combine (MINUS, GET_MODE (pos_rtx), | |
cf54c2cd DE |
6135 | GEN_INT (width - len), pos_rtx); |
6136 | /* POS may be less than 0 now, but we check for that below. | |
6137 | Note that it can only be less than 0 if GET_CODE (inner) != MEM. */ | |
f76b9db2 | 6138 | } |
230d793d RS |
6139 | |
6140 | /* If INNER has a wider mode, make it smaller. If this is a constant | |
6141 | extract, try to adjust the byte to point to the byte containing | |
6142 | the value. */ | |
d7cd794f RK |
6143 | if (wanted_inner_mode != VOIDmode |
6144 | && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode) | |
230d793d | 6145 | && ((GET_CODE (inner) == MEM |
d7cd794f | 6146 | && (inner_mode == wanted_inner_mode |
230d793d RS |
6147 | || (! mode_dependent_address_p (XEXP (inner, 0)) |
6148 | && ! MEM_VOLATILE_P (inner)))))) | |
6149 | { | |
6150 | int offset = 0; | |
6151 | ||
6152 | /* The computations below will be correct if the machine is big | |
6153 | endian in both bits and bytes or little endian in bits and bytes. | |
6154 | If it is mixed, we must adjust. */ | |
663522cb | 6155 | |
230d793d | 6156 | /* If bytes are big endian and we had a paradoxical SUBREG, we must |
0f41302f | 6157 | adjust OFFSET to compensate. */ |
f76b9db2 ILT |
6158 | if (BYTES_BIG_ENDIAN |
6159 | && ! spans_byte | |
230d793d RS |
6160 | && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode)) |
6161 | offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode); | |
230d793d RS |
6162 | |
6163 | /* If this is a constant position, we can move to the desired byte. */ | |
8999a12e | 6164 | if (pos_rtx == 0) |
230d793d RS |
6165 | { |
6166 | offset += pos / BITS_PER_UNIT; | |
d7cd794f | 6167 | pos %= GET_MODE_BITSIZE (wanted_inner_mode); |
230d793d RS |
6168 | } |
6169 | ||
f76b9db2 ILT |
6170 | if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN |
6171 | && ! spans_byte | |
d7cd794f | 6172 | && is_mode != wanted_inner_mode) |
c6b3f1f2 | 6173 | offset = (GET_MODE_SIZE (is_mode) |
d7cd794f | 6174 | - GET_MODE_SIZE (wanted_inner_mode) - offset); |
c6b3f1f2 | 6175 | |
d7cd794f | 6176 | if (offset != 0 || inner_mode != wanted_inner_mode) |
230d793d | 6177 | { |
38a448ca RH |
6178 | rtx newmem = gen_rtx_MEM (wanted_inner_mode, |
6179 | plus_constant (XEXP (inner, 0), offset)); | |
bf49b139 | 6180 | |
c6df88cb | 6181 | MEM_COPY_ATTRIBUTES (newmem, inner); |
230d793d RS |
6182 | inner = newmem; |
6183 | } | |
6184 | } | |
6185 | ||
9e74dc41 RK |
6186 | /* If INNER is not memory, we can always get it into the proper mode. If we |
6187 | are changing its mode, POS must be a constant and smaller than the size | |
6188 | of the new mode. */ | |
230d793d | 6189 | else if (GET_CODE (inner) != MEM) |
9e74dc41 RK |
6190 | { |
6191 | if (GET_MODE (inner) != wanted_inner_mode | |
6192 | && (pos_rtx != 0 | |
6193 | || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode))) | |
6194 | return 0; | |
6195 | ||
6196 | inner = force_to_mode (inner, wanted_inner_mode, | |
6197 | pos_rtx | |
6198 | || len + orig_pos >= HOST_BITS_PER_WIDE_INT | |
e8dc6d50 | 6199 | ? ~(HOST_WIDE_INT) 0 |
729a2125 RK |
6200 | : ((((unsigned HOST_WIDE_INT) 1 << len) - 1) |
6201 | << orig_pos), | |
9e74dc41 RK |
6202 | NULL_RTX, 0); |
6203 | } | |
230d793d RS |
6204 | |
6205 | /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we | |
6206 | have to zero extend. Otherwise, we can just use a SUBREG. */ | |
8999a12e | 6207 | if (pos_rtx != 0 |
230d793d | 6208 | && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx))) |
0f808b6f JH |
6209 | { |
6210 | rtx temp = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx); | |
6211 | ||
6212 | /* If we know that no extraneous bits are set, and that the high | |
6213 | bit is not set, convert the extraction to the cheaper one, either |
6214 | SIGN_EXTENSION or ZERO_EXTENSION, which are equivalent in these |
6215 | cases. */ | |
6216 | if (flag_expensive_optimizations | |
6217 | && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT | |
6218 | && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx)) | |
663522cb KH |
6219 | & ~(((unsigned HOST_WIDE_INT) |
6220 | GET_MODE_MASK (GET_MODE (pos_rtx))) | |
6221 | >> 1)) | |
0f808b6f JH |
6222 | == 0))) |
6223 | { | |
6224 | rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx); | |
6225 | ||
6226 | /* Prefer ZERO_EXTENSION, since it gives more information to |
6227 | backends. */ | |
6228 | if (rtx_cost (temp1, SET) < rtx_cost (temp, SET)) | |
6229 | temp = temp1; | |
6230 | } | |
6231 | pos_rtx = temp; | |
6232 | } | |
8999a12e | 6233 | else if (pos_rtx != 0 |
230d793d RS |
6234 | && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx))) |
6235 | pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx); | |
6236 | ||
8999a12e RK |
6237 | /* Make POS_RTX unless we already have it and it is correct. If we don't |
6238 | have a POS_RTX but we do have an ORIG_POS_RTX, the latter must | |
0f41302f | 6239 | be a CONST_INT. */ |
8999a12e RK |
6240 | if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos) |
6241 | pos_rtx = orig_pos_rtx; | |
6242 | ||
6243 | else if (pos_rtx == 0) | |
5f4f0e22 | 6244 | pos_rtx = GEN_INT (pos); |
230d793d RS |
6245 | |
6246 | /* Make the required operation. See if we can use an existing rtx. */ |
6247 | new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT, | |
5f4f0e22 | 6248 | extraction_mode, inner, GEN_INT (len), pos_rtx); |
230d793d RS |
6249 | if (! in_dest) |
6250 | new = gen_lowpart_for_combine (mode, new); | |
6251 | ||
6252 | return new; | |
6253 | } | |
6254 | \f | |
71923da7 RK |
6255 | /* See if X contains an ASHIFT of COUNT or more bits that can be commuted |
6256 | with any other operations in X. Return X without that shift if so. */ | |
6257 | ||
6258 | static rtx | |
6259 | extract_left_shift (x, count) | |
6260 | rtx x; | |
6261 | int count; | |
6262 | { | |
6263 | enum rtx_code code = GET_CODE (x); | |
6264 | enum machine_mode mode = GET_MODE (x); | |
6265 | rtx tem; | |
6266 | ||
6267 | switch (code) | |
6268 | { | |
6269 | case ASHIFT: | |
6270 | /* This is the shift itself. If it is wide enough, we will return | |
6271 | either the value being shifted if the shift count is equal to | |
6272 | COUNT or a shift for the difference. */ | |
6273 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
6274 | && INTVAL (XEXP (x, 1)) >= count) | |
6275 | return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), | |
6276 | INTVAL (XEXP (x, 1)) - count); | |
6277 | break; | |
6278 | ||
6279 | case NEG: case NOT: | |
6280 | if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0) | |
0c1c8ea6 | 6281 | return gen_unary (code, mode, mode, tem); |
71923da7 RK |
6282 | |
6283 | break; | |
6284 | ||
6285 | case PLUS: case IOR: case XOR: case AND: | |
6286 | /* If we can safely shift this constant and we find the inner shift, | |
6287 | make a new operation. */ | |
6288 | if (GET_CODE (XEXP (x,1)) == CONST_INT | |
b729186a | 6289 | && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0 |
71923da7 | 6290 | && (tem = extract_left_shift (XEXP (x, 0), count)) != 0) |
663522cb | 6291 | return gen_binary (code, mode, tem, |
71923da7 RK |
6292 | GEN_INT (INTVAL (XEXP (x, 1)) >> count)); |
6293 | ||
6294 | break; | |
663522cb | 6295 | |
e9a25f70 JL |
6296 | default: |
6297 | break; | |
71923da7 RK |
6298 | } |
6299 | ||
6300 | return 0; | |
6301 | } | |
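A small C verification of the commutation this function performs (illustrative only): when the low COUNT bits of the constant K are zero, the shift can be pulled out of a PLUS, IOR, XOR, or AND.

#include <assert.h>
#include <stdint.h>

int main (void)
{
  const unsigned count = 4;
  const uint32_t k = 0x120u;   /* low COUNT bits are zero */
  uint32_t x;
  for (x = 0; x < 4096; x++)
    {
      assert (((x << count) + k) == ((x + (k >> count)) << count));
      assert (((x << count) | k) == ((x | (k >> count)) << count));
      assert (((x << count) ^ k) == ((x ^ (k >> count)) << count));
      assert (((x << count) & k) == ((x & (k >> count)) << count));
    }
  return 0;
}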
6302 | \f | |
230d793d RS |
6303 | /* Look at the expression rooted at X. Look for expressions |
6304 | equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND. | |
6305 | Form these expressions. | |
6306 | ||
6307 | Return the new rtx, usually just X. | |
6308 | ||
6309 | Also, for machines like the Vax that don't have logical shift insns, | |
6310 | try to convert logical to arithmetic shift operations in cases where | |
6311 | they are equivalent. This undoes the canonicalizations to logical | |
6312 | shifts done elsewhere. | |
6313 | ||
6314 | We try, as much as possible, to re-use rtl expressions to save memory. | |
6315 | ||
6316 | IN_CODE says what kind of expression we are processing. Normally, it is | |
42495ca0 RK |
6317 | SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two |
6318 | being kludges), it is MEM. When processing the arguments of a comparison | |
230d793d RS |
6319 | or a COMPARE against zero, it is COMPARE. */ |
6320 | ||
6321 | static rtx | |
6322 | make_compound_operation (x, in_code) | |
6323 | rtx x; | |
6324 | enum rtx_code in_code; | |
6325 | { | |
6326 | enum rtx_code code = GET_CODE (x); | |
6327 | enum machine_mode mode = GET_MODE (x); | |
6328 | int mode_width = GET_MODE_BITSIZE (mode); | |
71923da7 | 6329 | rtx rhs, lhs; |
230d793d | 6330 | enum rtx_code next_code; |
f24ad0e4 | 6331 | int i; |
230d793d | 6332 | rtx new = 0; |
280f58ba | 6333 | rtx tem; |
6f7d635c | 6334 | const char *fmt; |
230d793d RS |
6335 | |
6336 | /* Select the code to be used in recursive calls. Once we are inside an | |
6337 | address, we stay there. If we have a comparison, set to COMPARE, | |
6338 | but once inside, go back to our default of SET. */ | |
6339 | ||
42495ca0 | 6340 | next_code = (code == MEM || code == PLUS || code == MINUS ? MEM |
230d793d RS |
6341 | : ((code == COMPARE || GET_RTX_CLASS (code) == '<') |
6342 | && XEXP (x, 1) == const0_rtx) ? COMPARE | |
6343 | : in_code == COMPARE ? SET : in_code); | |
6344 | ||
6345 | /* Process depending on the code of this operation. If NEW is set | |
6346 | non-zero, it will be returned. */ | |
6347 | ||
6348 | switch (code) | |
6349 | { | |
6350 | case ASHIFT: | |
230d793d RS |
6351 | /* Convert shifts by constants into multiplications if inside |
6352 | an address. */ | |
6353 | if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT | |
5f4f0e22 | 6354 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
230d793d | 6355 | && INTVAL (XEXP (x, 1)) >= 0) |
280f58ba RK |
6356 | { |
6357 | new = make_compound_operation (XEXP (x, 0), next_code); | |
6358 | new = gen_rtx_combine (MULT, mode, new, | |
6359 | GEN_INT ((HOST_WIDE_INT) 1 | |
6360 | << INTVAL (XEXP (x, 1)))); | |
6361 | } | |
230d793d RS |
6362 | break; |
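      /* Illustrative instance (added comment): inside an address,
	 (ashift R (const_int 2)) is rewritten above as
	 (mult R (const_int 4)), since combine canonicalizes shifts
	 inside addresses as multiplications.  */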
6363 | ||
6364 | case AND: | |
6365 | /* If the second operand is not a constant, we can't do anything | |
6366 | with it. */ | |
6367 | if (GET_CODE (XEXP (x, 1)) != CONST_INT) | |
6368 | break; | |
6369 | ||
6370 | /* If the constant is a power of two minus one and the first operand | |
6371 | is a logical right shift, make an extraction. */ | |
6372 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
6373 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
6374 | { |
6375 | new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); | |
6376 | new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1, | |
6377 | 0, in_code == COMPARE); | |
6378 | } | |
dfbe1b2f | 6379 | |
230d793d RS |
6380 | /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */ |
6381 | else if (GET_CODE (XEXP (x, 0)) == SUBREG | |
6382 | && subreg_lowpart_p (XEXP (x, 0)) | |
6383 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT | |
6384 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
6385 | { |
6386 | new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0), | |
6387 | next_code); | |
2f99f437 | 6388 | new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0, |
280f58ba RK |
6389 | XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1, |
6390 | 0, in_code == COMPARE); | |
6391 | } | |
45620ed4 | 6392 | /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */ |
c2f9f64e JW |
6393 | else if ((GET_CODE (XEXP (x, 0)) == XOR |
6394 | || GET_CODE (XEXP (x, 0)) == IOR) | |
6395 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT | |
6396 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT | |
6397 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
6398 | { | |
6399 | /* Apply the distributive law, and then try to make extractions. */ | |
6400 | new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode, | |
38a448ca RH |
6401 | gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0), |
6402 | XEXP (x, 1)), | |
6403 | gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1), | |
6404 | XEXP (x, 1))); | |
c2f9f64e JW |
6405 | new = make_compound_operation (new, in_code); |
6406 | } | |
a7c99304 RK |
6407 | |
6408 | /* If we have (and (rotate X C) M) and C is larger than the number |
6409 | of bits in M, this is an extraction. */ | |
6410 | ||
6411 | else if (GET_CODE (XEXP (x, 0)) == ROTATE | |
6412 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
6413 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0 | |
6414 | && i <= INTVAL (XEXP (XEXP (x, 0), 1))) | |
280f58ba RK |
6415 | { |
6416 | new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); | |
6417 | new = make_extraction (mode, new, | |
6418 | (GET_MODE_BITSIZE (mode) | |
6419 | - INTVAL (XEXP (XEXP (x, 0), 1))), | |
6420 | NULL_RTX, i, 1, 0, in_code == COMPARE); | |
6421 | } | |
a7c99304 RK |
6422 | |
6423 | /* On machines without logical shifts, if the operand of the AND is | |
230d793d RS |
6424 | a logical shift and our mask turns off all the propagated sign |
6425 | bits, we can replace the logical shift with an arithmetic shift. */ | |
d0ab8cd3 RK |
6426 | else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing |
6427 | && (lshr_optab->handlers[(int) mode].insn_code | |
6428 | == CODE_FOR_nothing) | |
230d793d RS |
6429 | && GET_CODE (XEXP (x, 0)) == LSHIFTRT |
6430 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
6431 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
5f4f0e22 CH |
6432 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT |
6433 | && mode_width <= HOST_BITS_PER_WIDE_INT) | |
230d793d | 6434 | { |
5f4f0e22 | 6435 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
230d793d RS |
6436 | |
6437 | mask >>= INTVAL (XEXP (XEXP (x, 0), 1)); | |
6438 | if ((INTVAL (XEXP (x, 1)) & ~mask) == 0) | |
6439 | SUBST (XEXP (x, 0), | |
280f58ba RK |
6440 | gen_rtx_combine (ASHIFTRT, mode, |
6441 | make_compound_operation (XEXP (XEXP (x, 0), 0), | |
6442 | next_code), | |
230d793d RS |
6443 | XEXP (XEXP (x, 0), 1))); |
6444 | } | |
6445 | ||
6446 | /* If the constant is one less than a power of two, this might be | |
6447 | representable by an extraction even if no shift is present. | |
6448 | If it doesn't end up being a ZERO_EXTEND, we will ignore it unless | |
6449 | we are in a COMPARE. */ | |
6450 | else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
6451 | new = make_extraction (mode, |
6452 | make_compound_operation (XEXP (x, 0), | |
6453 | next_code), | |
6454 | 0, NULL_RTX, i, 1, 0, in_code == COMPARE); | |
230d793d RS |
6455 | |
6456 | /* If we are in a comparison and this is an AND with a power of two, | |
6457 | convert this into the appropriate bit extract. */ | |
6458 | else if (in_code == COMPARE | |
6459 | && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0) | |
280f58ba RK |
6460 | new = make_extraction (mode, |
6461 | make_compound_operation (XEXP (x, 0), | |
6462 | next_code), | |
6463 | i, NULL_RTX, 1, 1, 0, 1); | |
230d793d RS |
6464 | |
6465 | break; | |
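      /* Worked example for the extraction rewrites above (added for
	 illustration): (and (lshiftrt X (const_int 4)) (const_int 7))
	 has 7 == 2^3 - 1, so exact_log2 (7 + 1) == 3 and the result is
	 (zero_extract X (const_int 3) (const_int 4)) -- an unsigned
	 three-bit field starting at bit 4.  */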
6466 | ||
6467 | case LSHIFTRT: | |
6468 | /* If the sign bit is known to be zero, replace this with an | |
6469 | arithmetic shift. */ | |
d0ab8cd3 RK |
6470 | if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing |
6471 | && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing | |
5f4f0e22 | 6472 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 6473 | && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0) |
230d793d | 6474 | { |
280f58ba RK |
6475 | new = gen_rtx_combine (ASHIFTRT, mode, |
6476 | make_compound_operation (XEXP (x, 0), | |
6477 | next_code), | |
6478 | XEXP (x, 1)); | |
230d793d RS |
6479 | break; |
6480 | } | |
6481 | ||
0f41302f | 6482 | /* ... fall through ... */ |
230d793d RS |
6483 | |
6484 | case ASHIFTRT: | |
71923da7 RK |
6485 | lhs = XEXP (x, 0); |
6486 | rhs = XEXP (x, 1); | |
6487 | ||
230d793d RS |
6488 | /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1, |
6489 | this is a SIGN_EXTRACT. */ | |
71923da7 RK |
6490 | if (GET_CODE (rhs) == CONST_INT |
6491 | && GET_CODE (lhs) == ASHIFT | |
6492 | && GET_CODE (XEXP (lhs, 1)) == CONST_INT | |
6493 | && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))) | |
280f58ba | 6494 | { |
71923da7 | 6495 | new = make_compound_operation (XEXP (lhs, 0), next_code); |
280f58ba | 6496 | new = make_extraction (mode, new, |
71923da7 RK |
6497 | INTVAL (rhs) - INTVAL (XEXP (lhs, 1)), |
6498 | NULL_RTX, mode_width - INTVAL (rhs), | |
d0ab8cd3 RK |
6499 | code == LSHIFTRT, 0, in_code == COMPARE); |
6500 | } | |
6501 | ||
71923da7 RK |
6502 | /* See if we have operations between an ASHIFTRT and an ASHIFT. |
6503 | If so, try to merge the shifts into a SIGN_EXTEND. We could | |
6504 | also do this for some cases of SIGN_EXTRACT, but it doesn't | |
6505 | seem worth the effort; the case checked for occurs on Alpha. */ | |
663522cb | 6506 | |
71923da7 RK |
6507 | if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o' |
6508 | && ! (GET_CODE (lhs) == SUBREG | |
6509 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o')) | |
6510 | && GET_CODE (rhs) == CONST_INT | |
6511 | && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT | |
6512 | && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0) | |
6513 | new = make_extraction (mode, make_compound_operation (new, next_code), | |
6514 | 0, NULL_RTX, mode_width - INTVAL (rhs), | |
6515 | code == LSHIFTRT, 0, in_code == COMPARE); | |
663522cb | 6516 | |
230d793d | 6517 | break; |
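      /* Worked example (illustration only): in SImode,
	 (ashiftrt (ashift X (const_int 24)) (const_int 24)) keeps the
	 low byte and replicates its sign, so it becomes
	 (sign_extract X (const_int 8) (const_int 0)) -- an eight-bit
	 signed field at bit 0, which make_extraction may render as a
	 QImode sign_extend.  */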
280f58ba RK |
6518 | |
6519 | case SUBREG: | |
6520 | /* Call ourselves recursively on the inner expression. If we are | |
6521 | narrowing the object and it has a different RTL code from | |
6522 | what it originally did, do this SUBREG as a force_to_mode. */ | |
6523 | ||
0a5cbff6 | 6524 | tem = make_compound_operation (SUBREG_REG (x), in_code); |
280f58ba RK |
6525 | if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x)) |
6526 | && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem)) | |
6527 | && subreg_lowpart_p (x)) | |
0a5cbff6 | 6528 | { |
e8dc6d50 JH |
6529 | rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0, |
6530 | NULL_RTX, 0); | |
0a5cbff6 RK |
6531 | |
6532 | /* If we have something other than a SUBREG, we might have | |
6533 | done an expansion, so rerun ourselves. */ |
6534 | if (GET_CODE (newer) != SUBREG) | |
6535 | newer = make_compound_operation (newer, in_code); | |
6536 | ||
6537 | return newer; | |
6538 | } | |
6f28d3e9 RH |
6539 | |
6540 | /* If this is a paradoxical subreg, and the new code is a sign or | |
6541 | zero extension, omit the subreg and widen the extension. If it | |
6542 | is a regular subreg, we can still get rid of the subreg by not | |
6543 | widening so much, or in fact removing the extension entirely. */ | |
6544 | if ((GET_CODE (tem) == SIGN_EXTEND | |
6545 | || GET_CODE (tem) == ZERO_EXTEND) | |
6546 | && subreg_lowpart_p (x)) | |
6547 | { | |
6548 | if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem)) | |
6549 | || (GET_MODE_SIZE (mode) > | |
6550 | GET_MODE_SIZE (GET_MODE (XEXP (tem, 0))))) | |
6551 | tem = gen_rtx_combine (GET_CODE (tem), mode, XEXP (tem, 0)); | |
6552 | else | |
6553 | tem = gen_lowpart_for_combine (mode, XEXP (tem, 0)); | |
6554 | return tem; | |
6555 | } | |
e9a25f70 | 6556 | break; |
663522cb | 6557 | |
e9a25f70 JL |
6558 | default: |
6559 | break; | |
230d793d RS |
6560 | } |
6561 | ||
6562 | if (new) | |
6563 | { | |
df62f951 | 6564 | x = gen_lowpart_for_combine (mode, new); |
230d793d RS |
6565 | code = GET_CODE (x); |
6566 | } | |
6567 | ||
6568 | /* Now recursively process each operand of this operation. */ | |
6569 | fmt = GET_RTX_FORMAT (code); | |
6570 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
6571 | if (fmt[i] == 'e') | |
6572 | { | |
6573 | new = make_compound_operation (XEXP (x, i), next_code); | |
6574 | SUBST (XEXP (x, i), new); | |
6575 | } | |
6576 | ||
6577 | return x; | |
6578 | } | |
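/* Source-level sketch (added for illustration): for an unsigned int X,
   the C expression (X >> 4) & 7 reaches combine as
   (and (lshiftrt X (const_int 4)) (const_int 7)) and leaves
   make_compound_operation as a ZERO_EXTRACT (or an equivalent form
   chosen by make_extraction) that targets with bit-field instructions
   can match directly.  */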
6579 | \f | |
6580 | /* Given M see if it is a value that would select a field of bits | |
663522cb KH |
6581 | within an item, but not the entire word. Return -1 if not. |
6582 | Otherwise, return the starting position of the field, where 0 is the | |
6583 | low-order bit. | |
230d793d RS |
6584 | |
6585 | *PLEN is set to the length of the field. */ | |
6586 | ||
6587 | static int | |
6588 | get_pos_from_mask (m, plen) | |
5f4f0e22 | 6589 | unsigned HOST_WIDE_INT m; |
770ae6cc | 6590 | unsigned HOST_WIDE_INT *plen; |
230d793d RS |
6591 | { |
6592 | /* Get the bit number of the first 1 bit from the right, -1 if none. */ | |
663522cb | 6593 | int pos = exact_log2 (m & -m); |
d3bc8938 | 6594 | int len; |
230d793d RS |
6595 | |
6596 | if (pos < 0) | |
6597 | return -1; | |
6598 | ||
6599 | /* Now shift off the low-order zero bits and see if we have a power of | |
6600 | two minus 1. */ | |
d3bc8938 | 6601 | len = exact_log2 ((m >> pos) + 1); |
230d793d | 6602 | |
d3bc8938 | 6603 | if (len <= 0) |
230d793d RS |
6604 | return -1; |
6605 | ||
d3bc8938 | 6606 | *plen = len; |
230d793d RS |
6607 | return pos; |
6608 | } | |
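/* Worked example (added; not in the original source):

       m == 0x78 (binary 0111 1000)
       m & -m       == 0x08  =>  pos = exact_log2 (0x08) == 3
       (m >> 3) + 1 == 0x10  =>  len = exact_log2 (0x10) == 4

   so 0x78 selects a four-bit field starting at bit 3.  A mask with a
   gap, such as 0x5a, fails the second exact_log2 and yields -1.  */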
6609 | \f | |
6139ff20 RK |
6610 | /* See if X can be simplified knowing that we will only refer to it in |
6611 | MODE and will only refer to those bits that are nonzero in MASK. | |
6612 | If other bits are being computed or if masking operations are done | |
6613 | that select a superset of the bits in MASK, they can sometimes be | |
6614 | ignored. | |
6615 | ||
6616 | Return a possibly simplified expression, but always convert X to | |
6617 | MODE. If X is a CONST_INT, AND the CONST_INT with MASK. | |
dfbe1b2f | 6618 | |
663522cb | 6619 | Also, if REG is non-zero and X is a register equal in value to REG, |
e3d616e3 RK |
6620 | replace X with REG. |
6621 | ||
6622 | If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK | |
6623 | are all off in X. This is used when X will be complemented, by either | |
180b8e4b | 6624 | NOT, NEG, or XOR. */ |
dfbe1b2f RK |
6625 | |
6626 | static rtx | |
e3d616e3 | 6627 | force_to_mode (x, mode, mask, reg, just_select) |
dfbe1b2f RK |
6628 | rtx x; |
6629 | enum machine_mode mode; | |
6139ff20 | 6630 | unsigned HOST_WIDE_INT mask; |
dfbe1b2f | 6631 | rtx reg; |
e3d616e3 | 6632 | int just_select; |
dfbe1b2f RK |
6633 | { |
6634 | enum rtx_code code = GET_CODE (x); | |
180b8e4b | 6635 | int next_select = just_select || code == XOR || code == NOT || code == NEG; |
ef026f91 RS |
6636 | enum machine_mode op_mode; |
6637 | unsigned HOST_WIDE_INT fuller_mask, nonzero; | |
6139ff20 RK |
6638 | rtx op0, op1, temp; |
6639 | ||
132d2040 RK |
6640 | /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the |
6641 | code below will do the wrong thing since the mode of such an | |
663522cb | 6642 | expression is VOIDmode. |
be3d27d6 CI |
6643 | |
6644 | Also do nothing if X is a CLOBBER; this can happen if X was | |
6645 | the return value from a call to gen_lowpart_for_combine. */ | |
6646 | if (code == CALL || code == ASM_OPERANDS || code == CLOBBER) | |
246e00f2 RK |
6647 | return x; |
6648 | ||
6139ff20 RK |
6649 | /* We want to perform the operation in its present mode unless we know |
6650 | that the operation is valid in MODE, in which case we do the operation | |
6651 | in MODE. */ | |
1c75dfa4 RK |
6652 | op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x)) |
6653 | && code_to_optab[(int) code] != 0 | |
ef026f91 RS |
6654 | && (code_to_optab[(int) code]->handlers[(int) mode].insn_code |
6655 | != CODE_FOR_nothing)) | |
6656 | ? mode : GET_MODE (x)); | |
e3d616e3 | 6657 | |
aa988991 RS |
6658 | /* It is not valid to do a right-shift in a narrower mode |
6659 | than the one it came in with. */ | |
6660 | if ((code == LSHIFTRT || code == ASHIFTRT) | |
6661 | && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x))) | |
6662 | op_mode = GET_MODE (x); | |
ef026f91 RS |
6663 | |
6664 | /* Truncate MASK to fit OP_MODE. */ | |
6665 | if (op_mode) | |
6666 | mask &= GET_MODE_MASK (op_mode); | |
6139ff20 RK |
6667 | |
6668 | /* When we have an arithmetic operation, or a shift whose count we | |
6669 | do not know, we need to assume that all bits up to the highest-order |
6670 | bit in MASK will be needed. This is how we form such a mask. */ | |
ef026f91 RS |
6671 | if (op_mode) |
6672 | fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT | |
6673 | ? GET_MODE_MASK (op_mode) | |
729a2125 RK |
6674 | : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) |
6675 | - 1)); | |
ef026f91 | 6676 | else |
663522cb | 6677 | fuller_mask = ~(HOST_WIDE_INT) 0; |
ef026f91 RS |
6678 | |
6679 | /* Determine what bits of X are guaranteed to be (non)zero. */ | |
6680 | nonzero = nonzero_bits (x, mode); | |
6139ff20 RK |
6681 | |
6682 | /* If none of the bits in X are needed, return a zero. */ | |
e3d616e3 | 6683 | if (! just_select && (nonzero & mask) == 0) |
6139ff20 | 6684 | return const0_rtx; |
dfbe1b2f | 6685 | |
6139ff20 RK |
6686 | /* If X is a CONST_INT, return a new one. Do this here since the |
6687 | test below will fail. */ | |
6688 | if (GET_CODE (x) == CONST_INT) | |
ceb7983c RK |
6689 | { |
6690 | HOST_WIDE_INT cval = INTVAL (x) & mask; | |
6691 | int width = GET_MODE_BITSIZE (mode); | |
6692 | ||
6693 | /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative |
6694 | number, sign extend it. */ | |
6695 | if (width > 0 && width < HOST_BITS_PER_WIDE_INT | |
6696 | && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) | |
6697 | cval |= (HOST_WIDE_INT) -1 << width; | |
663522cb | 6698 | |
ceb7983c RK |
6699 | return GEN_INT (cval); |
6700 | } | |
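  /* Example of the sign-extension above (added comment): with
     mode == QImode (width 8) and mask == 0xff, x == (const_int 0x90)
     gives cval == 0x90 with bit 7 set, so cval is widened to -0x70;
     combine keeps CONST_INTs sign-extended from the mode's width.  */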
dfbe1b2f | 6701 | |
180b8e4b RK |
6702 | /* If X is narrower than MODE and we want all the bits in X's mode, just |
6703 | get X in the proper mode. */ | |
6704 | if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode) | |
663522cb | 6705 | && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0) |
dfbe1b2f RK |
6706 | return gen_lowpart_for_combine (mode, x); |
6707 | ||
71923da7 RK |
6708 | /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in |
6709 | MASK are already known to be zero in X, we need not do anything. */ | |
663522cb | 6710 | if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0) |
6139ff20 RK |
6711 | return x; |
6712 | ||
dfbe1b2f RK |
6713 | switch (code) |
6714 | { | |
6139ff20 RK |
6715 | case CLOBBER: |
6716 | /* If X is a (clobber (const_int)), return it since we know we are | |
0f41302f | 6717 | generating something that won't match. */ |
6139ff20 RK |
6718 | return x; |
6719 | ||
6139ff20 RK |
6720 | case USE: |
6721 | /* X is a (use (mem ..)) that was made from a bit-field extraction that | |
6722 | spanned the boundary of the MEM. If we are now masking so it is | |
6723 | within that boundary, we don't need the USE any more. */ | |
f76b9db2 | 6724 | if (! BITS_BIG_ENDIAN |
663522cb | 6725 | && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
e3d616e3 | 6726 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
f76b9db2 | 6727 | break; |
6139ff20 | 6728 | |
dfbe1b2f RK |
6729 | case SIGN_EXTEND: |
6730 | case ZERO_EXTEND: | |
6731 | case ZERO_EXTRACT: | |
6732 | case SIGN_EXTRACT: | |
6733 | x = expand_compound_operation (x); | |
6734 | if (GET_CODE (x) != code) | |
e3d616e3 | 6735 | return force_to_mode (x, mode, mask, reg, next_select); |
dfbe1b2f RK |
6736 | break; |
6737 | ||
6738 | case REG: | |
6739 | if (reg != 0 && (rtx_equal_p (get_last_value (reg), x) | |
6740 | || rtx_equal_p (reg, get_last_value (x)))) | |
6741 | x = reg; | |
6742 | break; | |
6743 | ||
dfbe1b2f | 6744 | case SUBREG: |
6139ff20 | 6745 | if (subreg_lowpart_p (x) |
180b8e4b RK |
6746 | /* We can ignore the effect of this SUBREG if it narrows the mode or |
6747 | if the constant masks to zero all the bits the mode doesn't | |
6748 | have. */ | |
6139ff20 RK |
6749 | && ((GET_MODE_SIZE (GET_MODE (x)) |
6750 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
6139ff20 RK |
6751 | || (0 == (mask |
6752 | & GET_MODE_MASK (GET_MODE (x)) | |
663522cb | 6753 | & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))))))) |
e3d616e3 | 6754 | return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select); |
dfbe1b2f RK |
6755 | break; |
6756 | ||
6757 | case AND: | |
6139ff20 RK |
6758 | /* If this is an AND with a constant, convert it into an AND |
6759 | whose constant is the AND of that constant with MASK. If it | |
6760 | remains an AND of MASK, delete it since it is redundant. */ | |
dfbe1b2f | 6761 | |
2ca9ae17 | 6762 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) |
dfbe1b2f | 6763 | { |
6139ff20 RK |
6764 | x = simplify_and_const_int (x, op_mode, XEXP (x, 0), |
6765 | mask & INTVAL (XEXP (x, 1))); | |
dfbe1b2f RK |
6766 | |
6767 | /* If X is still an AND, see if it is an AND with a mask that | |
71923da7 RK |
6768 | is just some low-order bits. If so, and it is MASK, we don't |
6769 | need it. */ | |
dfbe1b2f RK |
6770 | |
6771 | if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT | |
e51712db | 6772 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask) |
dfbe1b2f | 6773 | x = XEXP (x, 0); |
d0ab8cd3 | 6774 | |
71923da7 RK |
6775 | /* If it remains an AND, try making another AND with the bits |
6776 | in the mode mask that aren't in MASK turned on. If the | |
6777 | constant in the AND is wide enough, this might make a | |
6778 | cheaper constant. */ | |
6779 | ||
6780 | if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT | |
2ca9ae17 JW |
6781 | && GET_MODE_MASK (GET_MODE (x)) != mask |
6782 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT) | |
71923da7 RK |
6783 | { |
6784 | HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1)) | |
663522cb | 6785 | | (GET_MODE_MASK (GET_MODE (x)) & ~mask)); |
71923da7 RK |
6786 | int width = GET_MODE_BITSIZE (GET_MODE (x)); |
6787 | rtx y; | |
6788 | ||
6789 | /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative |
6790 | number, sign extend it. */ | |
6791 | if (width > 0 && width < HOST_BITS_PER_WIDE_INT | |
6792 | && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) | |
6793 | cval |= (HOST_WIDE_INT) -1 << width; | |
6794 | ||
6795 | y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval)); | |
6796 | if (rtx_cost (y, SET) < rtx_cost (x, SET)) | |
6797 | x = y; | |
6798 | } | |
6799 | ||
d0ab8cd3 | 6800 | break; |
dfbe1b2f RK |
6801 | } |
6802 | ||
6139ff20 | 6803 | goto binop; |
dfbe1b2f RK |
6804 | |
6805 | case PLUS: | |
6139ff20 RK |
6806 | /* In (and (plus FOO C1) M), if M is a mask that just turns off |
6807 | low-order bits (as in an alignment operation) and FOO is already | |
6808 | aligned to that boundary, mask C1 to that boundary as well. | |
6809 | This may eliminate that PLUS and, later, the AND. */ | |
9fa6d012 TG |
6810 | |
6811 | { | |
770ae6cc | 6812 | unsigned int width = GET_MODE_BITSIZE (mode); |
9fa6d012 TG |
6813 | unsigned HOST_WIDE_INT smask = mask; |
6814 | ||
6815 | /* If MODE is narrower than HOST_WIDE_INT and mask is a negative | |
6816 | number, sign extend it. */ | |
6817 | ||
6818 | if (width < HOST_BITS_PER_WIDE_INT | |
6819 | && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) | |
6820 | smask |= (HOST_WIDE_INT) -1 << width; | |
6821 | ||
6822 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
0e9ff885 DM |
6823 | && exact_log2 (- smask) >= 0) |
6824 | { | |
6825 | #ifdef STACK_BIAS | |
6826 | if (STACK_BIAS | |
6827 | && (XEXP (x, 0) == stack_pointer_rtx | |
6828 | || XEXP (x, 0) == frame_pointer_rtx)) | |
6829 | { | |
663522cb KH |
6830 | int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT; |
6831 | unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode); | |
6832 | ||
6833 | sp_mask &= ~(sp_alignment - 1); | |
6834 | if ((sp_mask & ~smask) == 0 | |
6835 | && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~smask) != 0) | |
0e9ff885 | 6836 | return force_to_mode (plus_constant (XEXP (x, 0), |
663522cb | 6837 | ((INTVAL (XEXP (x, 1)) - |
835c8e04 | 6838 | STACK_BIAS) & smask) |
0e9ff885 | 6839 | + STACK_BIAS), |
663522cb KH |
6840 | mode, smask, reg, next_select); |
6841 | } | |
0e9ff885 | 6842 | #endif |
663522cb KH |
6843 | if ((nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0 |
6844 | && (INTVAL (XEXP (x, 1)) & ~smask) != 0) | |
0e9ff885 | 6845 | return force_to_mode (plus_constant (XEXP (x, 0), |
663522cb | 6846 | (INTVAL (XEXP (x, 1)) |
835c8e04 DT |
6847 | & smask)), |
6848 | mode, smask, reg, next_select); | |
0e9ff885 | 6849 | } |
9fa6d012 | 6850 | } |
6139ff20 | 6851 | |
0f41302f | 6852 | /* ... fall through ... */ |
6139ff20 | 6853 | |
dfbe1b2f | 6854 | case MULT: |
6139ff20 RK |
6855 | /* For PLUS, MINUS and MULT, we need any bits less significant than the |
6856 | most significant bit in MASK since carries from those bits will | |
6857 | affect the bits we are interested in. */ | |
6858 | mask = fuller_mask; | |
6859 | goto binop; | |
6860 | ||
d41638e4 RH |
6861 | case MINUS: |
6862 | /* If X is (minus C Y) where C's least set bit is larger than any bit | |
6863 | in the mask, then we may replace with (neg Y). */ | |
6864 | if (GET_CODE (XEXP (x, 0)) == CONST_INT | |
6865 | && (INTVAL (XEXP (x, 0)) & -INTVAL (XEXP (x, 0))) > mask) | |
6866 | { | |
6867 | x = gen_unary (NEG, GET_MODE (x), GET_MODE (x), XEXP (x, 1)); | |
6868 | return force_to_mode (x, mode, mask, reg, next_select); | |
6869 | } | |
6870 | ||
6871 | /* Similarly, if C contains every bit in the mask, then we may | |
6872 | replace with (not Y). */ | |
6873 | if (GET_CODE (XEXP (x, 0)) == CONST_INT | |
663522cb | 6874 | && (INTVAL (XEXP (x, 0)) | mask) == INTVAL (XEXP (x, 0))) |
d41638e4 RH |
6875 | { |
6876 | x = gen_unary (NOT, GET_MODE (x), GET_MODE (x), XEXP (x, 1)); | |
6877 | return force_to_mode (x, mode, mask, reg, next_select); | |
6878 | } | |
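  /* Illustrative check of the two rewrites above, with mask == 3:
     (minus (const_int 8) Y) equals (neg Y) modulo 4, because 8's lowest
     set bit exceeds every mask bit; and (minus (const_int 7) Y) equals
     (not Y) modulo 4, because 7 - Y == -1 - Y == ~Y in the low bits.  */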
6879 | ||
6880 | mask = fuller_mask; | |
6881 | goto binop; | |
6882 | ||
dfbe1b2f RK |
6883 | case IOR: |
6884 | case XOR: | |
6139ff20 RK |
6885 | /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and |
6886 | LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...) | |
6887 | operation which may be a bitfield extraction. Ensure that the | |
6888 | constant we form is not wider than the mode of X. */ | |
6889 | ||
6890 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
6891 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
6892 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
6893 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT | |
6894 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
6895 | && ((INTVAL (XEXP (XEXP (x, 0), 1)) | |
6896 | + floor_log2 (INTVAL (XEXP (x, 1)))) | |
6897 | < GET_MODE_BITSIZE (GET_MODE (x))) | |
6898 | && (INTVAL (XEXP (x, 1)) | |
663522cb | 6899 | & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0) |
6139ff20 RK |
6900 | { |
6901 | temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask) | |
663522cb | 6902 | << INTVAL (XEXP (XEXP (x, 0), 1))); |
6139ff20 RK |
6903 | temp = gen_binary (GET_CODE (x), GET_MODE (x), |
6904 | XEXP (XEXP (x, 0), 0), temp); | |
d4d2b13f RK |
6905 | x = gen_binary (LSHIFTRT, GET_MODE (x), temp, |
6906 | XEXP (XEXP (x, 0), 1)); | |
e3d616e3 | 6907 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
6908 | } |
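      /* Worked example (added): provided the nonzero_bits test above is
	 satisfied, (ior (lshiftrt FOO (const_int 2)) (const_int 3))
	 with mask 0xff becomes
	 (lshiftrt (ior FOO (const_int 12)) (const_int 2)),
	 pushing the IOR inside the shift so the surrounding AND can be
	 recognized as a bit-field extraction.  */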
6909 | ||
6910 | binop: | |
dfbe1b2f | 6911 | /* For most binary operations, just propagate into the operation and |
6139ff20 RK |
6912 | change the mode if we have an operation of that mode. */ |
6913 | ||
e3d616e3 RK |
6914 | op0 = gen_lowpart_for_combine (op_mode, |
6915 | force_to_mode (XEXP (x, 0), mode, mask, | |
6916 | reg, next_select)); | |
6917 | op1 = gen_lowpart_for_combine (op_mode, | |
6918 | force_to_mode (XEXP (x, 1), mode, mask, | |
6919 | reg, next_select)); | |
6139ff20 | 6920 | |
2dd484ed RK |
6921 | /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside |
6922 | MASK since OP1 might have been sign-extended but we never want | |
6923 | to turn on extra bits, since combine might have previously relied | |
6924 | on them being off. */ | |
6925 | if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR) | |
6926 | && (INTVAL (op1) & mask) != 0) | |
6927 | op1 = GEN_INT (INTVAL (op1) & mask); | |
663522cb | 6928 | |
6139ff20 RK |
6929 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1)) |
6930 | x = gen_binary (code, op_mode, op0, op1); | |
d0ab8cd3 | 6931 | break; |
dfbe1b2f RK |
6932 | |
6933 | case ASHIFT: | |
dfbe1b2f | 6934 | /* For left shifts, do the same, but just for the first operand. |
f6785026 RK |
6935 | However, we cannot do anything with shifts where we cannot |
6936 | guarantee that the counts are smaller than the size of the mode | |
6937 | because such a count will have a different meaning in a | |
6139ff20 | 6938 | wider mode. */ |
f6785026 RK |
6939 | |
6940 | if (! (GET_CODE (XEXP (x, 1)) == CONST_INT | |
6139ff20 | 6941 | && INTVAL (XEXP (x, 1)) >= 0 |
f6785026 RK |
6942 | && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)) |
6943 | && ! (GET_MODE (XEXP (x, 1)) != VOIDmode | |
6944 | && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1))) | |
adb7a1cb | 6945 | < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode)))) |
f6785026 | 6946 | break; |
663522cb | 6947 | |
6139ff20 RK |
6948 | /* If the shift count is a constant and we can do arithmetic in |
6949 | the mode of the shift, refine which bits we need. Otherwise, use the | |
6950 | conservative form of the mask. */ | |
6951 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
6952 | && INTVAL (XEXP (x, 1)) >= 0 | |
6953 | && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode) | |
6954 | && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) | |
6955 | mask >>= INTVAL (XEXP (x, 1)); | |
6956 | else | |
6957 | mask = fuller_mask; | |
6958 | ||
6959 | op0 = gen_lowpart_for_combine (op_mode, | |
6960 | force_to_mode (XEXP (x, 0), op_mode, | |
e3d616e3 | 6961 | mask, reg, next_select)); |
6139ff20 RK |
6962 | |
6963 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0)) | |
663522cb | 6964 | x = gen_binary (code, op_mode, op0, XEXP (x, 1)); |
d0ab8cd3 | 6965 | break; |
dfbe1b2f RK |
6966 | |
6967 | case LSHIFTRT: | |
1347292b JW |
6968 | /* Here we can only do something if the shift count is a constant, |
6969 | this shift constant is valid for the host, and we can do arithmetic | |
6970 | in OP_MODE. */ | |
dfbe1b2f RK |
6971 | |
6972 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
1347292b | 6973 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
6139ff20 | 6974 | && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) |
d0ab8cd3 | 6975 | { |
6139ff20 | 6976 | rtx inner = XEXP (x, 0); |
402b6c2a | 6977 | unsigned HOST_WIDE_INT inner_mask; |
6139ff20 RK |
6978 | |
6979 | /* Select the mask of the bits we need for the shift operand. */ | |
402b6c2a | 6980 | inner_mask = mask << INTVAL (XEXP (x, 1)); |
d0ab8cd3 | 6981 | |
6139ff20 | 6982 | /* We can only change the mode of the shift if we can do arithmetic |
402b6c2a JW |
6983 | in the mode of the shift and INNER_MASK is no wider than the |
6984 | width of OP_MODE. */ | |
6139ff20 | 6985 | if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT |
663522cb | 6986 | || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0) |
d0ab8cd3 RK |
6987 | op_mode = GET_MODE (x); |
6988 | ||
402b6c2a | 6989 | inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select); |
6139ff20 RK |
6990 | |
6991 | if (GET_MODE (x) != op_mode || inner != XEXP (x, 0)) | |
6992 | x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1)); | |
d0ab8cd3 | 6993 | } |
6139ff20 RK |
6994 | |
6995 | /* If we have (and (lshiftrt FOO C1) C2) where the combination of the | |
6996 | shift and AND produces only copies of the sign bit (C2 is one less | |
6997 | than a power of two), we can do this with just a shift. */ | |
6998 | ||
6999 | if (GET_CODE (x) == LSHIFTRT | |
7000 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
cfff35c1 JW |
7001 | /* The shift puts one of the sign bit copies in the least significant |
7002 | bit. */ | |
6139ff20 RK |
7003 | && ((INTVAL (XEXP (x, 1)) |
7004 | + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))) | |
7005 | >= GET_MODE_BITSIZE (GET_MODE (x))) | |
7006 | && exact_log2 (mask + 1) >= 0 | |
cfff35c1 JW |
7007 | /* Number of bits left after the shift must be more than the mask |
7008 | needs. */ | |
7009 | && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1)) | |
7010 | <= GET_MODE_BITSIZE (GET_MODE (x))) | |
7011 | /* Must be more sign bit copies than the mask needs. */ | |
770ae6cc | 7012 | && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))) |
6139ff20 RK |
7013 | >= exact_log2 (mask + 1))) |
7014 | x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), | |
7015 | GEN_INT (GET_MODE_BITSIZE (GET_MODE (x)) | |
7016 | - exact_log2 (mask + 1))); | |
fae2db47 JW |
7017 | |
7018 | goto shiftrt; | |
d0ab8cd3 RK |
7019 | |
7020 | case ASHIFTRT: | |
6139ff20 RK |
7021 | /* If we are just looking for the sign bit, we don't need this shift at |
7022 | all, even if it has a variable count. */ | |
9bf22b75 | 7023 | if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT |
e51712db | 7024 | && (mask == ((unsigned HOST_WIDE_INT) 1 |
9bf22b75 | 7025 | << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) |
e3d616e3 | 7026 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
6139ff20 RK |
7027 | |
7028 | /* If this is a shift by a constant, get a mask that contains those bits | |
7029 | that are not copies of the sign bit. We then have two cases: If | |
7030 | MASK only includes those bits, this can be a logical shift, which may | |
7031 | allow simplifications. If MASK is a single-bit field not within | |
7032 | those bits, we are requesting a copy of the sign bit and hence can | |
7033 | shift the sign bit to the appropriate location. */ | |
7034 | ||
7035 | if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0 | |
7036 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) | |
7037 | { | |
7038 | int i = -1; | |
7039 | ||
b69960ac RK |
7040 | /* If the considered data is wider than HOST_WIDE_INT, we can't |
7041 | represent a mask for all its bits in a single scalar. | |
7042 | But we only care about the lower bits, so calculate these. */ | |
7043 | ||
6a11342f | 7044 | if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT) |
b69960ac | 7045 | { |
663522cb | 7046 | nonzero = ~(HOST_WIDE_INT) 0; |
b69960ac RK |
7047 | |
7048 | /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) | |
7049 | is the number of bits a full-width mask would have set. | |
7050 | We need only shift if these are fewer than nonzero can | |
7051 | hold. If not, we must keep all bits set in nonzero. */ | |
7052 | ||
7053 | if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) | |
7054 | < HOST_BITS_PER_WIDE_INT) | |
7055 | nonzero >>= INTVAL (XEXP (x, 1)) | |
7056 | + HOST_BITS_PER_WIDE_INT | |
7057 | - GET_MODE_BITSIZE (GET_MODE (x)) ; | |
7058 | } | |
7059 | else | |
7060 | { | |
7061 | nonzero = GET_MODE_MASK (GET_MODE (x)); | |
7062 | nonzero >>= INTVAL (XEXP (x, 1)); | |
7063 | } | |
6139ff20 | 7064 | |
663522cb | 7065 | if ((mask & ~nonzero) == 0 |
6139ff20 RK |
7066 | || (i = exact_log2 (mask)) >= 0) |
7067 | { | |
7068 | x = simplify_shift_const | |
7069 | (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0), | |
7070 | i < 0 ? INTVAL (XEXP (x, 1)) | |
7071 | : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i); | |
7072 | ||
7073 | if (GET_CODE (x) != ASHIFTRT) | |
e3d616e3 | 7074 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
7075 | } |
7076 | } | |
7077 | ||
7078 | /* If MASK is 1, convert this to a LSHIFTRT. This can be done | |
7079 | even if the shift count isn't a constant. */ | |
7080 | if (mask == 1) | |
7081 | x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)); | |
7082 | ||
fae2db47 JW |
7083 | shiftrt: |
7084 | ||
7085 | /* If this is a zero- or sign-extension operation that just affects bits | |
4c002f29 RK |
7086 | we don't care about, remove it. Be sure the call above returned |
7087 | something that is still a shift. */ | |
d0ab8cd3 | 7088 | |
4c002f29 RK |
7089 | if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT) |
7090 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
d0ab8cd3 | 7091 | && INTVAL (XEXP (x, 1)) >= 0 |
6139ff20 RK |
7092 | && (INTVAL (XEXP (x, 1)) |
7093 | <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1)) | |
d0ab8cd3 RK |
7094 | && GET_CODE (XEXP (x, 0)) == ASHIFT |
7095 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
7096 | && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1))) | |
e3d616e3 RK |
7097 | return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, |
7098 | reg, next_select); | |
6139ff20 | 7099 | |
dfbe1b2f RK |
7100 | break; |
7101 | ||
6139ff20 RK |
7102 | case ROTATE: |
7103 | case ROTATERT: | |
7104 | /* If the shift count is constant and we can do computations | |
7105 | in the mode of X, compute where the bits we care about are. | |
7106 | Otherwise, we can't do anything. Don't change the mode of | |
7107 | the shift or propagate MODE into the shift, though. */ | |
7108 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
7109 | && INTVAL (XEXP (x, 1)) >= 0) | |
7110 | { | |
7111 | temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE, | |
7112 | GET_MODE (x), GEN_INT (mask), | |
7113 | XEXP (x, 1)); | |
7d171a1e | 7114 | if (temp && GET_CODE (temp) == CONST_INT) |
6139ff20 RK |
7115 | SUBST (XEXP (x, 0), |
7116 | force_to_mode (XEXP (x, 0), GET_MODE (x), | |
e3d616e3 | 7117 | INTVAL (temp), reg, next_select)); |
6139ff20 RK |
7118 | } |
7119 | break; | |
663522cb | 7120 | |
dfbe1b2f | 7121 | case NEG: |
180b8e4b RK |
7122 | /* If we just want the low-order bit, the NEG isn't needed since it |
7123 | won't change the low-order bit. */ | |
7124 | if (mask == 1) | |
7125 | return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select); | |
7126 | ||
6139ff20 RK |
7127 | /* We need any bits less significant than the most significant bit in |
7128 | MASK since carries from those bits will affect the bits we are | |
7129 | interested in. */ | |
7130 | mask = fuller_mask; | |
7131 | goto unop; | |
7132 | ||
dfbe1b2f | 7133 | case NOT: |
6139ff20 RK |
7134 | /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the |
7135 | same as the XOR case above. Ensure that the constant we form is not | |
7136 | wider than the mode of X. */ | |
7137 | ||
7138 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
7139 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
7140 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
7141 | && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask) | |
7142 | < GET_MODE_BITSIZE (GET_MODE (x))) | |
7143 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT) | |
7144 | { | |
7145 | temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1))); | |
7146 | temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp); | |
7147 | x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1)); | |
7148 | ||
e3d616e3 | 7149 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
7150 | } |
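      /* Illustrative instance (added comment): with mask == 0x1f,
	 (not (lshiftrt FOO (const_int 3))) becomes
	 (lshiftrt (xor FOO (const_int 0xf8)) (const_int 3)), using
	 ~A == A ^ -1 restricted to the bits the mask keeps.  */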
7151 | ||
f82da7d2 JW |
7152 | /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must |
7153 | use the full mask inside the NOT. */ | |
7154 | mask = fuller_mask; | |
7155 | ||
6139ff20 | 7156 | unop: |
e3d616e3 RK |
7157 | op0 = gen_lowpart_for_combine (op_mode, |
7158 | force_to_mode (XEXP (x, 0), mode, mask, | |
7159 | reg, next_select)); | |
6139ff20 | 7160 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0)) |
0c1c8ea6 | 7161 | x = gen_unary (code, op_mode, op_mode, op0); |
6139ff20 RK |
7162 | break; |
7163 | ||
7164 | case NE: | |
7165 | /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included | |
3aceff0d | 7166 | in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero, |
1a6ec070 | 7167 | which is equal to STORE_FLAG_VALUE. */ |
663522cb | 7168 | if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx |
3aceff0d | 7169 | && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0 |
1a6ec070 | 7170 | && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE) |
e3d616e3 | 7171 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
6139ff20 | 7172 | |
d0ab8cd3 RK |
7173 | break; |
7174 | ||
7175 | case IF_THEN_ELSE: | |
7176 | /* We have no way of knowing if the IF_THEN_ELSE can itself be | |
7177 | written in a narrower mode. We play it safe and do not do so. */ | |
7178 | ||
7179 | SUBST (XEXP (x, 1), | |
7180 | gen_lowpart_for_combine (GET_MODE (x), | |
7181 | force_to_mode (XEXP (x, 1), mode, | |
e3d616e3 | 7182 | mask, reg, next_select))); |
d0ab8cd3 RK |
7183 | SUBST (XEXP (x, 2), |
7184 | gen_lowpart_for_combine (GET_MODE (x), | |
7185 | force_to_mode (XEXP (x, 2), mode, | |
e3d616e3 | 7186 | mask, reg, next_select))); |
d0ab8cd3 | 7187 | break; |
663522cb | 7188 | |
e9a25f70 JL |
7189 | default: |
7190 | break; | |
dfbe1b2f RK |
7191 | } |
7192 | ||
d0ab8cd3 | 7193 | /* Ensure we return a value of the proper mode. */ |
dfbe1b2f RK |
7194 | return gen_lowpart_for_combine (mode, x); |
7195 | } | |
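/* Usage sketch (added for illustration): a caller that needs only the
   low-order bit, e.g. force_to_mode (x, QImode, 1, NULL_RTX, 0), lets
   the NEG arm above drop the negation outright (bit 0 of -Y equals
   bit 0 of Y) and turns an ASHIFTRT into the cheaper LSHIFTRT.  */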
7196 | \f | |
abe6e52f RK |
7197 | /* Return nonzero if X is an expression that has one of two values depending on |
7198 | whether some other value is zero or nonzero. In that case, we return the | |
7199 | value that is being tested, *PTRUE is set to the value if the rtx being | |
7200 | returned has a nonzero value, and *PFALSE is set to the other alternative. | |
7201 | ||
7202 | If we return zero, we set *PTRUE and *PFALSE to X. */ | |
7203 | ||
7204 | static rtx | |
7205 | if_then_else_cond (x, ptrue, pfalse) | |
7206 | rtx x; | |
7207 | rtx *ptrue, *pfalse; | |
7208 | { | |
7209 | enum machine_mode mode = GET_MODE (x); | |
7210 | enum rtx_code code = GET_CODE (x); | |
abe6e52f RK |
7211 | rtx cond0, cond1, true0, true1, false0, false1; |
7212 | unsigned HOST_WIDE_INT nz; | |
7213 | ||
14a774a9 RK |
7214 | /* If we are comparing a value against zero, we are done. */ |
7215 | if ((code == NE || code == EQ) | |
7216 | && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0) | |
7217 | { | |
e8758a3a JL |
7218 | *ptrue = (code == NE) ? const_true_rtx : const0_rtx; |
7219 | *pfalse = (code == NE) ? const0_rtx : const_true_rtx; | |
14a774a9 RK |
7220 | return XEXP (x, 0); |
7221 | } | |
7222 | ||
abe6e52f RK |
7223 | /* If this is a unary operation whose operand has one of two values, apply |
7224 | our opcode to compute those values. */ | |
14a774a9 RK |
7225 | else if (GET_RTX_CLASS (code) == '1' |
7226 | && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0) | |
abe6e52f | 7227 | { |
0c1c8ea6 RK |
7228 | *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0); |
7229 | *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0); | |
abe6e52f RK |
7230 | return cond0; |
7231 | } | |
7232 | ||
3a19aabc | 7233 | /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would |
ddd5a7c1 | 7234 | make can't possibly match and would suppress other optimizations. */ |
3a19aabc RK |
7235 | else if (code == COMPARE) |
7236 | ; | |
7237 | ||
abe6e52f RK |
7238 | /* If this is a binary operation, see if either side has only one of two |
7239 | values. If either one does or if both do and they are conditional on | |
7240 | the same value, compute the new true and false values. */ | |
7241 | else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2' | |
7242 | || GET_RTX_CLASS (code) == '<') | |
7243 | { | |
7244 | cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0); | |
7245 | cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1); | |
7246 | ||
7247 | if ((cond0 != 0 || cond1 != 0) | |
7248 | && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1))) | |
7249 | { | |
987e845a JW |
7250 | /* If if_then_else_cond returned zero, then true/false are the |
7251 | same rtl. We must copy one of them to prevent invalid rtl | |
7252 | sharing. */ | |
7253 | if (cond0 == 0) | |
7254 | true0 = copy_rtx (true0); | |
7255 | else if (cond1 == 0) | |
7256 | true1 = copy_rtx (true1); | |
7257 | ||
abe6e52f RK |
7258 | *ptrue = gen_binary (code, mode, true0, true1); |
7259 | *pfalse = gen_binary (code, mode, false0, false1); | |
7260 | return cond0 ? cond0 : cond1; | |
7261 | } | |
9210df58 | 7262 | |
9210df58 | 7263 | /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the |
0802d516 RK |
7264 | operands is zero when the other is non-zero, and vice-versa, |
7265 | and STORE_FLAG_VALUE is 1 or -1. */ | |
9210df58 | 7266 | |
0802d516 RK |
7267 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
7268 | && (code == PLUS || code == IOR || code == XOR || code == MINUS | |
663522cb | 7269 | || code == UMAX) |
9210df58 RK |
7270 | && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT) |
7271 | { | |
7272 | rtx op0 = XEXP (XEXP (x, 0), 1); | |
7273 | rtx op1 = XEXP (XEXP (x, 1), 1); | |
7274 | ||
7275 | cond0 = XEXP (XEXP (x, 0), 0); | |
7276 | cond1 = XEXP (XEXP (x, 1), 0); | |
7277 | ||
7278 | if (GET_RTX_CLASS (GET_CODE (cond0)) == '<' | |
7279 | && GET_RTX_CLASS (GET_CODE (cond1)) == '<' | |
7280 | && reversible_comparison_p (cond1) | |
7281 | && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1)) | |
7282 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0)) | |
7283 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1))) | |
7284 | || ((swap_condition (GET_CODE (cond0)) | |
7285 | == reverse_condition (GET_CODE (cond1))) | |
7286 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1)) | |
7287 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0)))) | |
7288 | && ! side_effects_p (x)) | |
7289 | { | |
7290 | *ptrue = gen_binary (MULT, mode, op0, const_true_rtx); | |
663522cb KH |
7291 | *pfalse = gen_binary (MULT, mode, |
7292 | (code == MINUS | |
0c1c8ea6 | 7293 | ? gen_unary (NEG, mode, mode, op1) : op1), |
9210df58 RK |
7294 | const_true_rtx); |
7295 | return cond0; | |
7296 | } | |
7297 | } | |
7298 | ||
7299 | /* Similarly for MULT, AND and UMIN, except that for these the result |
7300 | is always zero. */ | |
0802d516 RK |
7301 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
7302 | && (code == MULT || code == AND || code == UMIN) | |
9210df58 RK |
7303 | && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT) |
7304 | { | |
7305 | cond0 = XEXP (XEXP (x, 0), 0); | |
7306 | cond1 = XEXP (XEXP (x, 1), 0); | |
7307 | ||
7308 | if (GET_RTX_CLASS (GET_CODE (cond0)) == '<' | |
7309 | && GET_RTX_CLASS (GET_CODE (cond1)) == '<' | |
7310 | && reversible_comparison_p (cond1) | |
7311 | && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1)) | |
7312 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0)) | |
7313 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1))) | |
7314 | || ((swap_condition (GET_CODE (cond0)) | |
7315 | == reverse_condition (GET_CODE (cond1))) | |
7316 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1)) | |
7317 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0)))) | |
7318 | && ! side_effects_p (x)) | |
7319 | { | |
7320 | *ptrue = *pfalse = const0_rtx; | |
7321 | return cond0; | |
7322 | } | |
7323 | } | |
abe6e52f RK |
7324 | } |
7325 | ||
7326 | else if (code == IF_THEN_ELSE) | |
7327 | { | |
7328 | /* If we have IF_THEN_ELSE already, extract the condition and | |
7329 | canonicalize it if it is NE or EQ. */ | |
7330 | cond0 = XEXP (x, 0); | |
7331 | *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2); | |
7332 | if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx) | |
7333 | return XEXP (cond0, 0); | |
7334 | else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx) | |
7335 | { | |
7336 | *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1); | |
7337 | return XEXP (cond0, 0); | |
7338 | } | |
7339 | else | |
7340 | return cond0; | |
7341 | } | |
7342 | ||
7343 | /* If X is a normal SUBREG with both inner and outer modes integral, | |
7344 | we can narrow both the true and false values of the inner expression, | |
7345 | if there is a condition. */ | |
7346 | else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT | |
7347 | && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT | |
7348 | && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) | |
7349 | && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x), | |
7350 | &true0, &false0))) | |
7351 | { | |
668bcf76 JL |
7352 | if ((GET_CODE (SUBREG_REG (x)) == REG |
7353 | || GET_CODE (SUBREG_REG (x)) == MEM | |
7354 | || CONSTANT_P (SUBREG_REG (x))) | |
7355 | && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD | |
54f3b5c2 R |
7356 | && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0)) |
7357 | { | |
7358 | true0 = operand_subword (true0, SUBREG_WORD (x), 0, mode); | |
7359 | false0 = operand_subword (false0, SUBREG_WORD (x), 0, mode); | |
7360 | } | |
49219895 | 7361 | *ptrue = force_to_mode (true0, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0); |
00244e6b | 7362 | *pfalse |
49219895 | 7363 | = force_to_mode (false0, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0); |
abe6e52f | 7364 | |
abe6e52f RK |
7365 | return cond0; |
7366 | } | |
7367 | ||
7368 | /* If X is a constant, this isn't special and will cause confusions | |
7369 | if we treat it as such. Likewise if it is equivalent to a constant. */ | |
7370 | else if (CONSTANT_P (x) | |
7371 | || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0))) | |
7372 | ; | |
7373 | ||
663522cb | 7374 | /* If X is known to be either 0 or -1, those are the true and |
abe6e52f | 7375 | false values when testing X. */ |
49219895 JH |
7376 | else if (x == constm1_rtx || x == const0_rtx |
7377 | || (mode != VOIDmode | |
7378 | && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode))) | |
abe6e52f RK |
7379 | { |
7380 | *ptrue = constm1_rtx, *pfalse = const0_rtx; | |
7381 | return x; | |
7382 | } | |
7383 | ||
7384 | /* Likewise for 0 or a single bit. */ | |
49219895 JH |
7385 | else if (mode != VOIDmode |
7386 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
7387 | && exact_log2 (nz = nonzero_bits (x, mode)) >= 0) | |
abe6e52f RK |
7388 | { |
7389 | *ptrue = GEN_INT (nz), *pfalse = const0_rtx; | |
7390 | return x; | |
7391 | } | |
7392 | ||
7393 | /* Otherwise fail; show no condition with true and false values the same. */ | |
7394 | *ptrue = *pfalse = x; | |
7395 | return 0; | |
7396 | } | |
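/* Behavior sketch (added): for x == (ne (reg A) (const_int 0)) the
   first arm returns (reg A) with *ptrue == const_true_rtx and
   *pfalse == const0_rtx; for x == (const_int 5) the CONSTANT_P arm
   declines, so *ptrue == *pfalse == x and zero is returned.  */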
7397 | \f | |
1a26b032 RK |
7398 | /* Return the value of expression X given the fact that condition COND |
7399 | is known to be true when applied to REG as its first operand and VAL | |
7400 | as its second. X is known to not be shared and so can be modified in | |
7401 | place. | |
7402 | ||
7403 | We only handle the simplest cases, and specifically those cases that | |
7404 | arise with IF_THEN_ELSE expressions. */ | |
7405 | ||
7406 | static rtx | |
7407 | known_cond (x, cond, reg, val) | |
7408 | rtx x; | |
7409 | enum rtx_code cond; | |
7410 | rtx reg, val; | |
7411 | { | |
7412 | enum rtx_code code = GET_CODE (x); | |
f24ad0e4 | 7413 | rtx temp; |
6f7d635c | 7414 | const char *fmt; |
1a26b032 RK |
7415 | int i, j; |
7416 | ||
7417 | if (side_effects_p (x)) | |
7418 | return x; | |
7419 | ||
7420 | if (cond == EQ && rtx_equal_p (x, reg)) | |
7421 | return val; | |
7422 | ||
7423 | /* If X is (abs REG) and we know something about REG's relationship | |
7424 | with zero, we may be able to simplify this. */ | |
7425 | ||
7426 | if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx) | |
7427 | switch (cond) | |
7428 | { | |
7429 | case GE: case GT: case EQ: | |
7430 | return XEXP (x, 0); | |
7431 | case LT: case LE: | |
0c1c8ea6 RK |
7432 | return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)), |
7433 | XEXP (x, 0)); | |
e9a25f70 JL |
7434 | default: |
7435 | break; | |
1a26b032 RK |
7436 | } |
7437 | ||
7438 | /* The only other cases we handle are MIN, MAX, and comparisons if the | |
7439 | operands are the same as REG and VAL. */ | |
7440 | ||
7441 | else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c') | |
7442 | { | |
7443 | if (rtx_equal_p (XEXP (x, 0), val)) | |
7444 | cond = swap_condition (cond), temp = val, val = reg, reg = temp; | |
7445 | ||
7446 | if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val)) | |
7447 | { | |
7448 | if (GET_RTX_CLASS (code) == '<') | |
1eb8759b RH |
7449 | { |
7450 | if (comparison_dominates_p (cond, code)) | |
7451 | return const_true_rtx; | |
1a26b032 | 7452 | |
1eb8759b RH |
7453 | code = reverse_condition (code); |
7454 | if (code != UNKNOWN | |
7455 | && comparison_dominates_p (cond, code)) | |
7456 | return const0_rtx; | |
7457 | else | |
7458 | return x; | |
7459 | } | |
1a26b032 RK |
7460 | else if (code == SMAX || code == SMIN |
7461 | || code == UMIN || code == UMAX) | |
7462 | { | |
7463 | int unsignedp = (code == UMIN || code == UMAX); | |
7464 | ||
7465 | if (code == SMAX || code == UMAX) | |
7466 | cond = reverse_condition (cond); | |
7467 | ||
7468 | switch (cond) | |
7469 | { | |
7470 | case GE: case GT: | |
7471 | return unsignedp ? x : XEXP (x, 1); | |
7472 | case LE: case LT: | |
7473 | return unsignedp ? x : XEXP (x, 0); | |
7474 | case GEU: case GTU: | |
7475 | return unsignedp ? XEXP (x, 1) : x; | |
7476 | case LEU: case LTU: | |
7477 | return unsignedp ? XEXP (x, 0) : x; | |
e9a25f70 JL |
7478 | default: |
7479 | break; | |
1a26b032 RK |
7480 | } |
7481 | } | |
7482 | } | |
7483 | } | |
7484 | ||
7485 | fmt = GET_RTX_FORMAT (code); | |
7486 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
7487 | { | |
7488 | if (fmt[i] == 'e') | |
7489 | SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val)); | |
7490 | else if (fmt[i] == 'E') | |
7491 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
7492 | SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j), | |
7493 | cond, reg, val)); | |
7494 | } | |
7495 | ||
7496 | return x; | |
7497 | } | |
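/* Example (added for illustration): known_cond ((abs (reg R)), GE,
   (reg R), const0_rtx) returns (reg R), since R >= 0 makes the ABS a
   no-op; with cond == LT it would return (neg (reg R)) instead.  */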
7498 | \f | |
e11fa86f RK |
7499 | /* See if X and Y are equal for the purposes of seeing if we can rewrite an |
7500 | assignment as a field assignment. */ | |
7501 | ||
7502 | static int | |
7503 | rtx_equal_for_field_assignment_p (x, y) | |
7504 | rtx x; | |
7505 | rtx y; | |
7506 | { | |
e11fa86f RK |
7507 | if (x == y || rtx_equal_p (x, y)) |
7508 | return 1; | |
7509 | ||
7510 | if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y)) | |
7511 | return 0; | |
7512 | ||
7513 | /* Check for a paradoxical SUBREG of a MEM compared with the MEM. | |
7514 | Note that all SUBREGs of MEM are paradoxical; otherwise they | |
7515 | would have been rewritten. */ | |
7516 | if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG | |
7517 | && GET_CODE (SUBREG_REG (y)) == MEM | |
7518 | && rtx_equal_p (SUBREG_REG (y), | |
7519 | gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x))) | |
7520 | return 1; | |
7521 | ||
7522 | if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG | |
7523 | && GET_CODE (SUBREG_REG (x)) == MEM | |
7524 | && rtx_equal_p (SUBREG_REG (x), | |
7525 | gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y))) | |
7526 | return 1; | |
7527 | ||
9ec36da5 JL |
7528 | /* We used to see if get_last_value of X and Y were the same but that's |
7529 | not correct. In one direction, we'll cause the assignment to have | |
7530 | the wrong destination and in the case, we'll import a register into this | |
7531 | insn that might have already have been dead. So fail if none of the | |
7532 | above cases are true. */ | |
7533 | return 0; | |
e11fa86f RK |
7534 | } |
7535 | \f | |
230d793d RS |
7536 | /* See if X, a SET operation, can be rewritten as a bit-field assignment. |
7537 | Return that assignment if so. | |
7538 | ||
7539 | We only handle the most common cases. */ | |
7540 | ||
7541 | static rtx | |
7542 | make_field_assignment (x) | |
7543 | rtx x; | |
7544 | { | |
7545 | rtx dest = SET_DEST (x); | |
7546 | rtx src = SET_SRC (x); | |
dfbe1b2f | 7547 | rtx assign; |
e11fa86f | 7548 | rtx rhs, lhs; |
5f4f0e22 | 7549 | HOST_WIDE_INT c1; |
770ae6cc RK |
7550 | HOST_WIDE_INT pos; |
7551 | unsigned HOST_WIDE_INT len; | |
dfbe1b2f RK |
7552 | rtx other; |
7553 | enum machine_mode mode; | |
230d793d RS |
7554 | |
7555 | /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is | |
7556 | a clear of a one-bit field. We will have changed it to | |
7557 | (and (rotate (const_int -2) POS) DEST), so check for that. Also check | |
7558 | for a SUBREG. */ | |
7559 | ||
7560 | if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE | |
7561 | && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT | |
7562 | && INTVAL (XEXP (XEXP (src, 0), 0)) == -2 | |
e11fa86f | 7563 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 7564 | { |
8999a12e | 7565 | assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1), |
230d793d | 7566 | 1, 1, 1, 0); |
76184def | 7567 | if (assign != 0) |
38a448ca | 7568 | return gen_rtx_SET (VOIDmode, assign, const0_rtx); |
76184def | 7569 | return x; |
230d793d RS |
7570 | } |
7571 | ||
7572 | else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG | |
7573 | && subreg_lowpart_p (XEXP (src, 0)) | |
663522cb | 7574 | && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0))) |
230d793d RS |
7575 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0))))) |
7576 | && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE | |
7577 | && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2 | |
e11fa86f | 7578 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 7579 | { |
8999a12e | 7580 | assign = make_extraction (VOIDmode, dest, 0, |
230d793d RS |
7581 | XEXP (SUBREG_REG (XEXP (src, 0)), 1), |
7582 | 1, 1, 1, 0); | |
76184def | 7583 | if (assign != 0) |
38a448ca | 7584 | return gen_rtx_SET (VOIDmode, assign, const0_rtx); |
76184def | 7585 | return x; |
230d793d RS |
7586 | } |
7587 | ||
9dd11dcb | 7588 | /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a |
230d793d RS |
7589 | one-bit field. */ |
7590 | else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT | |
7591 | && XEXP (XEXP (src, 0), 0) == const1_rtx | |
e11fa86f | 7592 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 7593 | { |
8999a12e | 7594 | assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1), |
230d793d | 7595 | 1, 1, 1, 0); |
76184def | 7596 | if (assign != 0) |
38a448ca | 7597 | return gen_rtx_SET (VOIDmode, assign, const1_rtx); |
76184def | 7598 | return x; |
230d793d RS |
7599 | } |
7600 | ||
dfbe1b2f | 7601 | /* The other case we handle is assignments into a constant-position |
9dd11dcb | 7602 | field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents |
dfbe1b2f RK |
7603 | a mask that has all one bits except for a group of zero bits and |
7604 | OTHER is known to have zeros where C1 has ones, this is such an | |
7605 | assignment. Compute the position and length from C1. Shift OTHER | |
7606 | to the appropriate position, force it to the required mode, and | |
7607 | make the extraction. Check for the AND in both operands. */ | |
7608 | ||
9dd11dcb | 7609 | if (GET_CODE (src) != IOR && GET_CODE (src) != XOR) |
e11fa86f RK |
7610 | return x; |
7611 | ||
7612 | rhs = expand_compound_operation (XEXP (src, 0)); | |
7613 | lhs = expand_compound_operation (XEXP (src, 1)); | |
7614 | ||
7615 | if (GET_CODE (rhs) == AND | |
7616 | && GET_CODE (XEXP (rhs, 1)) == CONST_INT | |
7617 | && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest)) | |
7618 | c1 = INTVAL (XEXP (rhs, 1)), other = lhs; | |
7619 | else if (GET_CODE (lhs) == AND | |
7620 | && GET_CODE (XEXP (lhs, 1)) == CONST_INT | |
7621 | && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest)) | |
7622 | c1 = INTVAL (XEXP (lhs, 1)), other = rhs; | |
dfbe1b2f RK |
7623 | else |
7624 | return x; | |
230d793d | 7625 | |
663522cb | 7626 | pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len); |
dfbe1b2f | 7627 | if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest)) |
e5e809f4 JL |
7628 | || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT |
7629 | || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0) | |
dfbe1b2f | 7630 | return x; |
230d793d | 7631 | |
5f4f0e22 | 7632 | assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0); |
76184def DE |
7633 | if (assign == 0) |
7634 | return x; | |
230d793d | 7635 | |
dfbe1b2f RK |
7636 | /* The mode to use for the source is the mode of the assignment, or of |
7637 | what is inside a possible STRICT_LOW_PART. */ | |
663522cb | 7638 | mode = (GET_CODE (assign) == STRICT_LOW_PART |
dfbe1b2f | 7639 | ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign)); |
230d793d | 7640 | |
dfbe1b2f RK |
7641 | /* Shift OTHER right POS places and make it the source, restricting it |
7642 | to the proper length and mode. */ | |
230d793d | 7643 | |
5f4f0e22 CH |
7644 | src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT, |
7645 | GET_MODE (src), other, pos), | |
6139ff20 RK |
7646 | mode, |
7647 | GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT | |
e8dc6d50 | 7648 | ? ~(HOST_WIDE_INT) 0 |
729a2125 | 7649 | : ((unsigned HOST_WIDE_INT) 1 << len) - 1, |
e3d616e3 | 7650 | dest, 0); |
230d793d | 7651 | |
dfbe1b2f | 7652 | return gen_rtx_combine (SET, VOIDmode, assign, src); |
230d793d RS |
7653 | } |
7654 | \f | |
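The rotated-constant form checked above rests on a bit identity: the constant -2 (all ones except bit 0) rotated left by POS is exactly the mask ~(1 << POS), all ones except bit POS. A minimal standalone C sketch (not combine.c code; rotate_left is a hypothetical helper) that verifies the identity for every bit position:

/* Check that rotating -2 left by POS clears exactly bit POS,
   i.e. rotate_left (-2, POS) == ~(1 << POS).  */
#include <assert.h>
#include <limits.h>

static unsigned int
rotate_left (unsigned int x, unsigned int n)
{
  unsigned int width = sizeof (unsigned int) * CHAR_BIT;
  return n == 0 ? x : (x << n) | (x >> (width - n));
}

int
main (void)
{
  unsigned int pos;
  for (pos = 0; pos < sizeof (unsigned int) * CHAR_BIT; pos++)
    assert (rotate_left (-2U, pos) == ~(1U << pos));
  return 0;
}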
7655 | /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c) | |
7656 | if so. */ | |
7657 | ||
7658 | static rtx | |
7659 | apply_distributive_law (x) | |
7660 | rtx x; | |
7661 | { | |
7662 | enum rtx_code code = GET_CODE (x); | |
7663 | rtx lhs, rhs, other; | |
7664 | rtx tem; | |
7665 | enum rtx_code inner_code; | |
7666 | ||
d8a8a4da RS |
7667 | /* Distributivity is not true for floating point. |
7668 | It can change the value. So don't do it. | |
7669 | -- rms and moshier@world.std.com. */ | |
3ad2180a | 7670 | if (FLOAT_MODE_P (GET_MODE (x))) |
d8a8a4da RS |
7671 | return x; |
7672 | ||
230d793d RS |
7673 | /* The outer operation can only be one of the following: */ |
7674 | if (code != IOR && code != AND && code != XOR | |
7675 | && code != PLUS && code != MINUS) | |
7676 | return x; | |
7677 | ||
7678 | lhs = XEXP (x, 0), rhs = XEXP (x, 1); | |
7679 | ||
0f41302f MS |
7680 | /* If either operand is a primitive we can't do anything, so get out |
7681 | fast. */ | |
230d793d | 7682 | if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o' |
dfbe1b2f | 7683 | || GET_RTX_CLASS (GET_CODE (rhs)) == 'o') |
230d793d RS |
7684 | return x; |
7685 | ||
7686 | lhs = expand_compound_operation (lhs); | |
7687 | rhs = expand_compound_operation (rhs); | |
7688 | inner_code = GET_CODE (lhs); | |
7689 | if (inner_code != GET_CODE (rhs)) | |
7690 | return x; | |
7691 | ||
7692 | /* See if the inner and outer operations distribute. */ | |
7693 | switch (inner_code) | |
7694 | { | |
7695 | case LSHIFTRT: | |
7696 | case ASHIFTRT: | |
7697 | case AND: | |
7698 | case IOR: | |
7699 | /* These all distribute except over PLUS. */ | |
7700 | if (code == PLUS || code == MINUS) | |
7701 | return x; | |
7702 | break; | |
7703 | ||
7704 | case MULT: | |
7705 | if (code != PLUS && code != MINUS) | |
7706 | return x; | |
7707 | break; | |
7708 | ||
7709 | case ASHIFT: | |
45620ed4 | 7710 | /* This is also a multiply, so it distributes over everything. */ |
230d793d RS |
7711 | break; |
7712 | ||
7713 | case SUBREG: | |
dfbe1b2f RK |
7714 | /* Non-paradoxical SUBREGs distribute over all operations, provided | |
7715 | the inner modes and word numbers are the same, this is an extraction | |
2b4bd1bc JW |
7716 | of a low-order part, we don't convert an fp operation to int or |
7717 | vice versa, and we would not be converting a single-word | |
dfbe1b2f | 7718 | operation into a multi-word operation. The latter test is not |
2b4bd1bc | 7719 | required, but it prevents generating unneeded multi-word operations. |
dfbe1b2f RK |
7720 | Some of the previous tests are redundant given the latter test, but |
7721 | are retained because they are required for correctness. | |
7722 | ||
7723 | We produce the result slightly differently in this case. */ | |
7724 | ||
7725 | if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs)) | |
7726 | || SUBREG_WORD (lhs) != SUBREG_WORD (rhs) | |
7727 | || ! subreg_lowpart_p (lhs) | |
2b4bd1bc JW |
7728 | || (GET_MODE_CLASS (GET_MODE (lhs)) |
7729 | != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs)))) | |
dfbe1b2f | 7730 | || (GET_MODE_SIZE (GET_MODE (lhs)) |
8af24e26 | 7731 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs)))) |
dfbe1b2f | 7732 | || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD) |
230d793d RS |
7733 | return x; |
7734 | ||
7735 | tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)), | |
7736 | SUBREG_REG (lhs), SUBREG_REG (rhs)); | |
7737 | return gen_lowpart_for_combine (GET_MODE (x), tem); | |
7738 | ||
7739 | default: | |
7740 | return x; | |
7741 | } | |
7742 | ||
7743 | /* Set LHS and RHS to the inner operands (A and B in the example | |
7744 | above) and set OTHER to the common operand (C in the example). | |
7745 | There is only one way to do this unless the inner operation is | |
7746 | commutative. */ | |
7747 | if (GET_RTX_CLASS (inner_code) == 'c' | |
7748 | && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0))) | |
7749 | other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1); | |
7750 | else if (GET_RTX_CLASS (inner_code) == 'c' | |
7751 | && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1))) | |
7752 | other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0); | |
7753 | else if (GET_RTX_CLASS (inner_code) == 'c' | |
7754 | && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0))) | |
7755 | other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1); | |
7756 | else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1))) | |
7757 | other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0); | |
7758 | else | |
7759 | return x; | |
7760 | ||
7761 | /* Form the new inner operation, seeing if it simplifies first. */ | |
7762 | tem = gen_binary (code, GET_MODE (x), lhs, rhs); | |
7763 | ||
7764 | /* There is one exception to the general way of distributing: | |
7765 | (a | c) ^ (b | c) -> (a ^ b) & ~c */ | |
7766 | if (code == XOR && inner_code == IOR) | |
7767 | { | |
7768 | inner_code = AND; | |
0c1c8ea6 | 7769 | other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other); |
230d793d RS |
7770 | } |
7771 | ||
7772 | /* We may be able to continue distributing the result, so call | |
7773 | ourselves recursively on the inner operation before forming the | |
7774 | outer operation, which we return. */ | |
7775 | return gen_binary (inner_code, GET_MODE (x), | |
7776 | apply_distributive_law (tem), other); | |
7777 | } | |
7778 | \f | |
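Both the general distribution performed above and its XOR-over-IOR exception are pure bit identities, so they can be checked exhaustively on narrow operands. A standalone sketch (plain C, not combine.c code) over all 8-bit values:

/* Check (a & c) | (b & c) == (a | b) & c, the shape handled by the
   main path, and (a | c) ^ (b | c) == (a ^ b) & ~c, the special case
   taken when the outer code is XOR and the inner code is IOR.  */
#include <assert.h>

int
main (void)
{
  unsigned int a, b, c;
  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      for (c = 0; c < 256; c++)
        {
          assert (((a & c) | (b & c)) == ((a | b) & c));
          assert ((((a | c) ^ (b | c)) & 0xff)
                  == (((a ^ b) & ~c) & 0xff));
        }
  return 0;
}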
7779 | /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done | |
7780 | in MODE. | |
7781 | ||
7782 | Return an equivalent form, if different from X. Otherwise, return X. If | |
7783 | X is zero, we are to always construct the equivalent form. */ | |
7784 | ||
7785 | static rtx | |
7786 | simplify_and_const_int (x, mode, varop, constop) | |
7787 | rtx x; | |
7788 | enum machine_mode mode; | |
7789 | rtx varop; | |
5f4f0e22 | 7790 | unsigned HOST_WIDE_INT constop; |
230d793d | 7791 | { |
951553af | 7792 | unsigned HOST_WIDE_INT nonzero; |
42301240 | 7793 | int i; |
230d793d | 7794 | |
6139ff20 RK |
7795 | /* Simplify VAROP knowing that we will be only looking at some of the |
7796 | bits in it. */ | |
e3d616e3 | 7797 | varop = force_to_mode (varop, mode, constop, NULL_RTX, 0); |
230d793d | 7798 | |
6139ff20 RK |
7799 | /* If VAROP is a CLOBBER, we will fail so return it; if it is a |
7800 | CONST_INT, we are done. */ | |
7801 | if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT) | |
7802 | return varop; | |
230d793d | 7803 | |
fc06d7aa RK |
7804 | /* See what bits may be nonzero in VAROP. Unlike the general case of |
7805 | a call to nonzero_bits, here we don't care about bits outside | |
7806 | MODE. */ | |
7807 | ||
7808 | nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode); | |
7e4ce834 | 7809 | nonzero = trunc_int_for_mode (nonzero, mode); |
9fa6d012 | 7810 | |
230d793d | 7811 | /* Turn off all bits in the constant that are known to already be zero. |
951553af | 7812 | Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS |
230d793d RS |
7813 | which is tested below. */ |
7814 | ||
951553af | 7815 | constop &= nonzero; |
230d793d RS |
7816 | |
7817 | /* If we don't have any bits left, return zero. */ | |
7818 | if (constop == 0) | |
7819 | return const0_rtx; | |
7820 | ||
42301240 RK |
7821 | /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is |
7822 | a power of two, we can replace this with an ASHIFT. */ | |
7823 | if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1 | |
7824 | && (i = exact_log2 (constop)) >= 0) | |
7825 | return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i); | |
663522cb | 7826 | |
6139ff20 RK |
7827 | /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR |
7828 | or XOR, then try to apply the distributive law. This may eliminate | |
7829 | operations if either branch can be simplified because of the AND. | |
7830 | It may also make some cases more complex, but those cases probably | |
7831 | won't match a pattern either with or without this. */ | |
7832 | ||
7833 | if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR) | |
7834 | return | |
7835 | gen_lowpart_for_combine | |
7836 | (mode, | |
7837 | apply_distributive_law | |
7838 | (gen_binary (GET_CODE (varop), GET_MODE (varop), | |
7839 | simplify_and_const_int (NULL_RTX, GET_MODE (varop), | |
7840 | XEXP (varop, 0), constop), | |
7841 | simplify_and_const_int (NULL_RTX, GET_MODE (varop), | |
7842 | XEXP (varop, 1), constop)))); | |
7843 | ||
230d793d RS |
7844 | /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG |
7845 | if we already had one (just check for the simplest cases). */ | |
7846 | if (x && GET_CODE (XEXP (x, 0)) == SUBREG | |
7847 | && GET_MODE (XEXP (x, 0)) == mode | |
7848 | && SUBREG_REG (XEXP (x, 0)) == varop) | |
7849 | varop = XEXP (x, 0); | |
7850 | else | |
7851 | varop = gen_lowpart_for_combine (mode, varop); | |
7852 | ||
0f41302f | 7853 | /* If we can't make the SUBREG, try to return what we were given. */ |
230d793d RS |
7854 | if (GET_CODE (varop) == CLOBBER) |
7855 | return x ? x : varop; | |
7856 | ||
7857 | /* If we are only masking insignificant bits, return VAROP. */ | |
951553af | 7858 | if (constop == nonzero) |
230d793d RS |
7859 | x = varop; |
7860 | ||
7861 | /* Otherwise, return an AND. See how much, if any, of X we can use. */ | |
7862 | else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode) | |
6139ff20 | 7863 | x = gen_binary (AND, mode, varop, GEN_INT (constop)); |
230d793d RS |
7864 | |
7865 | else | |
7866 | { | |
7867 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
e51712db | 7868 | || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop) |
5f4f0e22 | 7869 | SUBST (XEXP (x, 1), GEN_INT (constop)); |
230d793d RS |
7870 | |
7871 | SUBST (XEXP (x, 0), varop); | |
7872 | } | |
7873 | ||
7874 | return x; | |
7875 | } | |
7876 | \f | |
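The decisive step in simplify_and_const_int is comparing CONSTOP against the nonzero mask: bits that can never be set are dropped from the constant, and if what survives equals the whole mask the AND does nothing. A standalone sketch of that classification (plain C, not combine.c code; the nonzero mask is taken as a given here, where combine would compute it with nonzero_bits):

#include <stdio.h>

/* CONSTOP is the AND constant; NONZERO is the set of bits that may
   be nonzero in the other operand.  */
static const char *
classify_and (unsigned int constop, unsigned int nonzero)
{
  constop &= nonzero;          /* turn off bits already known zero */
  if (constop == 0)
    return "fold to const0_rtx";
  if (constop == nonzero)
    return "AND is redundant; use VAROP directly";
  return "emit an AND with the reduced constant";
}

int
main (void)
{
  /* Only bits 0..7 can be nonzero in these examples.  */
  puts (classify_and (0xff00, 0x00ff));   /* fold to const0_rtx */
  puts (classify_and (0x00ff, 0x00ff));   /* AND is redundant */
  puts (classify_and (0x000f, 0x00ff));   /* reduced AND remains */
  return 0;
}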
b3728b0e JW |
7877 | /* We let num_sign_bit_copies recur into nonzero_bits as that is useful. |
7878 | We don't let nonzero_bits recur into num_sign_bit_copies, because that | |
7879 | is less useful. We can't allow both, because that results in exponential | |
956d6950 | 7880 | run time recursion. There is a nullstone testcase that triggered |
b3728b0e JW |
7881 | this. This macro avoids accidental uses of num_sign_bit_copies. */ |
7882 | #define num_sign_bit_copies() | |
7883 | ||
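The guard works because a function-like macro declared with no parameters cannot be invoked with any: after the #define, any textual call to num_sign_bit_copies inside nonzero_bits becomes a preprocessing error instead of a silent recursion. A tiny standalone illustration of the trick (not combine.c code):

#define f()                /* zero-parameter function-like macro */
/* int i = f (1);             would not compile: the macro is used
                              with one argument but takes none */
#undef f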
230d793d RS |
7884 | /* Given an expression, X, compute which bits in X can be non-zero. |
7885 | We don't care about bits outside of those defined in MODE. | |
7886 | ||
7887 | For most X this is simply GET_MODE_MASK (MODE), but if X is | |
7888 | a shift, AND, or zero_extract, we can do better. */ | |
7889 | ||
5f4f0e22 | 7890 | static unsigned HOST_WIDE_INT |
951553af | 7891 | nonzero_bits (x, mode) |
230d793d RS |
7892 | rtx x; |
7893 | enum machine_mode mode; | |
7894 | { | |
951553af RK |
7895 | unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode); |
7896 | unsigned HOST_WIDE_INT inner_nz; | |
230d793d | 7897 | enum rtx_code code; |
770ae6cc | 7898 | unsigned int mode_width = GET_MODE_BITSIZE (mode); |
230d793d RS |
7899 | rtx tem; |
7900 | ||
1c75dfa4 RK |
7901 | /* For floating-point values, assume all bits are needed. */ |
7902 | if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)) | |
7903 | return nonzero; | |
7904 | ||
230d793d RS |
7905 | /* If X is wider than MODE, use its mode instead. */ |
7906 | if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width) | |
7907 | { | |
7908 | mode = GET_MODE (x); | |
951553af | 7909 | nonzero = GET_MODE_MASK (mode); |
230d793d RS |
7910 | mode_width = GET_MODE_BITSIZE (mode); |
7911 | } | |
7912 | ||
5f4f0e22 | 7913 | if (mode_width > HOST_BITS_PER_WIDE_INT) |
230d793d RS |
7914 | /* Our only callers in this case look for single bit values. So |
7915 | just return the mode mask. Those tests will then be false. */ | |
951553af | 7916 | return nonzero; |
230d793d | 7917 | |
8baf60bb | 7918 | #ifndef WORD_REGISTER_OPERATIONS |
c6965c0f | 7919 | /* If MODE is wider than X, but both are a single word for both the host |
663522cb | 7920 | and target machines, we can compute this from which bits of the |
0840fd91 RK |
7921 | object might be nonzero in its own mode, taking into account the fact |
7922 | that on many CISC machines, accessing an object in a wider mode | |
7923 | causes the high-order bits to become undefined. So they are | |
7924 | not known to be zero. */ | |
7925 | ||
7926 | if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode | |
7927 | && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD | |
7928 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
c6965c0f | 7929 | && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x))) |
0840fd91 RK |
7930 | { |
7931 | nonzero &= nonzero_bits (x, GET_MODE (x)); | |
663522cb | 7932 | nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)); |
0840fd91 RK |
7933 | return nonzero; |
7934 | } | |
7935 | #endif | |
7936 | ||
230d793d RS |
7937 | code = GET_CODE (x); |
7938 | switch (code) | |
7939 | { | |
7940 | case REG: | |
320dd7a7 RK |
7941 | #ifdef POINTERS_EXTEND_UNSIGNED |
7942 | /* If pointers extend unsigned and this is a pointer in Pmode, say that | |
7943 | all the bits above ptr_mode are known to be zero. */ | |
7944 | if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
7945 | && REGNO_POINTER_FLAG (REGNO (x))) | |
7946 | nonzero &= GET_MODE_MASK (ptr_mode); | |
7947 | #endif | |
7948 | ||
b0d71df9 RK |
7949 | #ifdef STACK_BOUNDARY |
7950 | /* If this is the stack pointer, we may know something about its | |
7951 | alignment. If PUSH_ROUNDING is defined, it is possible for the | |
230d793d RS |
7952 | stack to be momentarily aligned only to that amount, so we pick |
7953 | the least alignment. */ | |
7954 | ||
ee49a9c7 JW |
7955 | /* We can't check for arg_pointer_rtx here, because it is not |
7956 | guaranteed to have as much alignment as the stack pointer. | |
7957 | In particular, in the Irix6 n64 ABI, the stack has 128 bit | |
7958 | alignment but the argument pointer has only 64 bit alignment. */ | |
7959 | ||
0e9ff885 DM |
7960 | if ((x == frame_pointer_rtx |
7961 | || x == stack_pointer_rtx | |
7962 | || x == hard_frame_pointer_rtx | |
7963 | || (REGNO (x) >= FIRST_VIRTUAL_REGISTER | |
7964 | && REGNO (x) <= LAST_VIRTUAL_REGISTER)) | |
7965 | #ifdef STACK_BIAS | |
7966 | && !STACK_BIAS | |
663522cb | 7967 | #endif |
0e9ff885 | 7968 | ) |
230d793d | 7969 | { |
b0d71df9 | 7970 | int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT; |
230d793d RS |
7971 | |
7972 | #ifdef PUSH_ROUNDING | |
f73ad30e | 7973 | if (REGNO (x) == STACK_POINTER_REGNUM && PUSH_ARGS) |
b0d71df9 | 7974 | sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment); |
230d793d RS |
7975 | #endif |
7976 | ||
320dd7a7 RK |
7977 | /* We must return here, otherwise we may get a worse result from |
7978 | one of the choices below. There is nothing useful below as | |
7979 | far as the stack pointer is concerned. */ | |
663522cb | 7980 | return nonzero &= ~(sp_alignment - 1); |
230d793d | 7981 | } |
b0d71df9 | 7982 | #endif |
230d793d | 7983 | |
55310dad RK |
7984 | /* If X is a register whose nonzero bits value is current, use it. |
7985 | Otherwise, if X is a register whose value we can find, use that | |
7986 | value. Otherwise, use the previously-computed global nonzero bits | |
7987 | for this register. */ | |
7988 | ||
7989 | if (reg_last_set_value[REGNO (x)] != 0 | |
7990 | && reg_last_set_mode[REGNO (x)] == mode | |
57cf50a4 GRK |
7991 | && (reg_last_set_label[REGNO (x)] == label_tick |
7992 | || (REGNO (x) >= FIRST_PSEUDO_REGISTER | |
7993 | && REG_N_SETS (REGNO (x)) == 1 | |
663522cb | 7994 | && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, |
57cf50a4 | 7995 | REGNO (x)))) |
55310dad RK |
7996 | && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid) |
7997 | return reg_last_set_nonzero_bits[REGNO (x)]; | |
230d793d RS |
7998 | |
7999 | tem = get_last_value (x); | |
9afa3d54 | 8000 | |
230d793d | 8001 | if (tem) |
9afa3d54 RK |
8002 | { |
8003 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND | |
8004 | /* If X is narrower than MODE and TEM is a non-negative | |
8005 | constant that would appear negative in the mode of X, | |
8006 | sign-extend it for use in reg_nonzero_bits because some | |
8007 | machines (maybe most) will actually do the sign-extension | |
663522cb | 8008 | and this is the conservative approach. |
9afa3d54 RK |
8009 | |
8010 | ??? For 2.5, try to tighten up the MD files in this regard | |
8011 | instead of this kludge. */ | |
8012 | ||
8013 | if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width | |
8014 | && GET_CODE (tem) == CONST_INT | |
8015 | && INTVAL (tem) > 0 | |
8016 | && 0 != (INTVAL (tem) | |
8017 | & ((HOST_WIDE_INT) 1 | |
9e69be8c | 8018 | << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) |
9afa3d54 RK |
8019 | tem = GEN_INT (INTVAL (tem) |
8020 | | ((HOST_WIDE_INT) (-1) | |
8021 | << GET_MODE_BITSIZE (GET_MODE (x)))); | |
8022 | #endif | |
8023 | return nonzero_bits (tem, mode); | |
8024 | } | |
951553af RK |
8025 | else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)]) |
8026 | return reg_nonzero_bits[REGNO (x)] & nonzero; | |
230d793d | 8027 | else |
951553af | 8028 | return nonzero; |
230d793d RS |
8029 | |
8030 | case CONST_INT: | |
9afa3d54 RK |
8031 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND |
8032 | /* If X is negative in MODE, sign-extend the value. */ | |
9e69be8c RK |
8033 | if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD |
8034 | && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1)))) | |
8035 | return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width)); | |
9afa3d54 RK |
8036 | #endif |
8037 | ||
230d793d RS |
8038 | return INTVAL (x); |
8039 | ||
230d793d | 8040 | case MEM: |
8baf60bb | 8041 | #ifdef LOAD_EXTEND_OP |
230d793d RS |
8042 | /* In many, if not most, RISC machines, reading a byte from memory |
8043 | zeros the rest of the register. Noticing that fact saves a lot | |
8044 | of extra zero-extends. */ | |
8baf60bb RK |
8045 | if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND) |
8046 | nonzero &= GET_MODE_MASK (GET_MODE (x)); | |
230d793d | 8047 | #endif |
8baf60bb | 8048 | break; |
230d793d | 8049 | |
230d793d RS |
8050 | case EQ: case NE: |
8051 | case GT: case GTU: | |
8052 | case LT: case LTU: | |
8053 | case GE: case GEU: | |
8054 | case LE: case LEU: | |
3f508eca | 8055 | |
c6965c0f RK |
8056 | /* If this produces an integer result, we know which bits are set. |
8057 | Code here used to clear bits outside the mode of X, but that is | |
8058 | now done above. */ | |
230d793d | 8059 | |
c6965c0f RK |
8060 | if (GET_MODE_CLASS (mode) == MODE_INT |
8061 | && mode_width <= HOST_BITS_PER_WIDE_INT) | |
8062 | nonzero = STORE_FLAG_VALUE; | |
230d793d | 8063 | break; |
230d793d | 8064 | |
230d793d | 8065 | case NEG: |
b3728b0e JW |
8066 | #if 0 |
8067 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
8068 | and num_sign_bit_copies. */ | |
d0ab8cd3 RK |
8069 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) |
8070 | == GET_MODE_BITSIZE (GET_MODE (x))) | |
951553af | 8071 | nonzero = 1; |
b3728b0e | 8072 | #endif |
230d793d RS |
8073 | |
8074 | if (GET_MODE_SIZE (GET_MODE (x)) < mode_width) | |
663522cb | 8075 | nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x))); |
230d793d | 8076 | break; |
d0ab8cd3 RK |
8077 | |
8078 | case ABS: | |
b3728b0e JW |
8079 | #if 0 |
8080 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
8081 | and num_sign_bit_copies. */ | |
d0ab8cd3 RK |
8082 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) |
8083 | == GET_MODE_BITSIZE (GET_MODE (x))) | |
951553af | 8084 | nonzero = 1; |
b3728b0e | 8085 | #endif |
d0ab8cd3 | 8086 | break; |
230d793d RS |
8087 | |
8088 | case TRUNCATE: | |
951553af | 8089 | nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode)); |
230d793d RS |
8090 | break; |
8091 | ||
8092 | case ZERO_EXTEND: | |
951553af | 8093 | nonzero &= nonzero_bits (XEXP (x, 0), mode); |
230d793d | 8094 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) |
951553af | 8095 | nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); |
230d793d RS |
8096 | break; |
8097 | ||
8098 | case SIGN_EXTEND: | |
8099 | /* If the sign bit is known clear, this is the same as ZERO_EXTEND. | |
8100 | Otherwise, show that all the bits in the outer mode but not in the | |
8101 | inner mode may be non-zero. */ | |
951553af | 8102 | inner_nz = nonzero_bits (XEXP (x, 0), mode); |
230d793d RS |
8103 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) |
8104 | { | |
951553af | 8105 | inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); |
e3da301d MS |
8106 | if (inner_nz |
8107 | & (((HOST_WIDE_INT) 1 | |
8108 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))) | |
951553af | 8109 | inner_nz |= (GET_MODE_MASK (mode) |
663522cb | 8110 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))); |
230d793d RS |
8111 | } |
8112 | ||
951553af | 8113 | nonzero &= inner_nz; |
230d793d RS |
8114 | break; |
8115 | ||
8116 | case AND: | |
951553af RK |
8117 | nonzero &= (nonzero_bits (XEXP (x, 0), mode) |
8118 | & nonzero_bits (XEXP (x, 1), mode)); | |
230d793d RS |
8119 | break; |
8120 | ||
d0ab8cd3 RK |
8121 | case XOR: case IOR: |
8122 | case UMIN: case UMAX: case SMIN: case SMAX: | |
951553af RK |
8123 | nonzero &= (nonzero_bits (XEXP (x, 0), mode) |
8124 | | nonzero_bits (XEXP (x, 1), mode)); | |
230d793d RS |
8125 | break; |
8126 | ||
8127 | case PLUS: case MINUS: | |
8128 | case MULT: | |
8129 | case DIV: case UDIV: | |
8130 | case MOD: case UMOD: | |
8131 | /* We can apply the rules of arithmetic to compute the number of | |
8132 | high- and low-order zero bits of these operations. We start by | |
8133 | computing the width (position of the highest-order non-zero bit) | |
8134 | and the number of low-order zero bits for each value. */ | |
8135 | { | |
951553af RK |
8136 | unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode); |
8137 | unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode); | |
8138 | int width0 = floor_log2 (nz0) + 1; | |
8139 | int width1 = floor_log2 (nz1) + 1; | |
8140 | int low0 = floor_log2 (nz0 & -nz0); | |
8141 | int low1 = floor_log2 (nz1 & -nz1); | |
318b149c RK |
8142 | HOST_WIDE_INT op0_maybe_minusp |
8143 | = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1))); | |
8144 | HOST_WIDE_INT op1_maybe_minusp | |
8145 | = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1))); | |
770ae6cc | 8146 | unsigned int result_width = mode_width; |
230d793d RS |
8147 | int result_low = 0; |
8148 | ||
8149 | switch (code) | |
8150 | { | |
8151 | case PLUS: | |
0e9ff885 DM |
8152 | #ifdef STACK_BIAS |
8153 | if (STACK_BIAS | |
663522cb KH |
8154 | && (XEXP (x, 0) == stack_pointer_rtx |
8155 | || XEXP (x, 0) == frame_pointer_rtx) | |
8156 | && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
0e9ff885 DM |
8157 | { |
8158 | int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT; | |
8159 | ||
663522cb KH |
8160 | nz0 = (GET_MODE_MASK (mode) & ~(sp_alignment - 1)); |
8161 | nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS; | |
8162 | width0 = floor_log2 (nz0) + 1; | |
8163 | width1 = floor_log2 (nz1) + 1; | |
8164 | low0 = floor_log2 (nz0 & -nz0); | |
8165 | low1 = floor_log2 (nz1 & -nz1); | |
0e9ff885 | 8166 | } |
663522cb | 8167 | #endif |
230d793d RS |
8168 | result_width = MAX (width0, width1) + 1; |
8169 | result_low = MIN (low0, low1); | |
8170 | break; | |
8171 | case MINUS: | |
8172 | result_low = MIN (low0, low1); | |
8173 | break; | |
8174 | case MULT: | |
8175 | result_width = width0 + width1; | |
8176 | result_low = low0 + low1; | |
8177 | break; | |
8178 | case DIV: | |
8179 | if (! op0_maybe_minusp && ! op1_maybe_minusp) | |
8180 | result_width = width0; | |
8181 | break; | |
8182 | case UDIV: | |
8183 | result_width = width0; | |
8184 | break; | |
8185 | case MOD: | |
8186 | if (! op0_maybe_minusp && ! op1_maybe_minusp) | |
8187 | result_width = MIN (width0, width1); | |
8188 | result_low = MIN (low0, low1); | |
8189 | break; | |
8190 | case UMOD: | |
8191 | result_width = MIN (width0, width1); | |
8192 | result_low = MIN (low0, low1); | |
8193 | break; | |
e9a25f70 JL |
8194 | default: |
8195 | abort (); | |
230d793d RS |
8196 | } |
8197 | ||
8198 | if (result_width < mode_width) | |
951553af | 8199 | nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1; |
230d793d RS |
8200 | |
8201 | if (result_low > 0) | |
663522cb | 8202 | nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1); |
230d793d RS |
8203 | } |
8204 | break; | |
8205 | ||
8206 | case ZERO_EXTRACT: | |
8207 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
5f4f0e22 | 8208 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) |
951553af | 8209 | nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1; |
230d793d RS |
8210 | break; |
8211 | ||
8212 | case SUBREG: | |
c3c2cb37 RK |
8213 | /* If this is a SUBREG formed for a promoted variable that has |
8214 | been zero-extended, we know that at least the high-order bits | |
8215 | are zero, though others might be too. */ | |
8216 | ||
8217 | if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x)) | |
951553af RK |
8218 | nonzero = (GET_MODE_MASK (GET_MODE (x)) |
8219 | & nonzero_bits (SUBREG_REG (x), GET_MODE (x))); | |
c3c2cb37 | 8220 | |
230d793d RS |
8221 | /* If the inner mode is a single word for both the host and target |
8222 | machines, we can compute this from which bits of the inner | |
951553af | 8223 | object might be nonzero. */ |
230d793d | 8224 | if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD |
5f4f0e22 CH |
8225 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) |
8226 | <= HOST_BITS_PER_WIDE_INT)) | |
230d793d | 8227 | { |
951553af | 8228 | nonzero &= nonzero_bits (SUBREG_REG (x), mode); |
8baf60bb | 8229 | |
b52ce03d R |
8230 | #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP) |
8231 | /* If this is a typical RISC machine, we only have to worry | |
8232 | about the way loads are extended. */ | |
8233 | if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND | |
729a2125 RK |
8234 | ? (((nonzero |
8235 | & (((unsigned HOST_WIDE_INT) 1 | |
8236 | << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))) | |
8237 | != 0)) | |
b52ce03d | 8238 | : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND) |
230d793d | 8239 | #endif |
b52ce03d R |
8240 | { |
8241 | /* On many CISC machines, accessing an object in a wider mode | |
8242 | causes the high-order bits to become undefined. So they are | |
8243 | not known to be zero. */ | |
8244 | if (GET_MODE_SIZE (GET_MODE (x)) | |
8245 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
8246 | nonzero |= (GET_MODE_MASK (GET_MODE (x)) | |
663522cb | 8247 | & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))); |
b52ce03d | 8248 | } |
230d793d RS |
8249 | } |
8250 | break; | |
8251 | ||
8252 | case ASHIFTRT: | |
8253 | case LSHIFTRT: | |
8254 | case ASHIFT: | |
230d793d | 8255 | case ROTATE: |
951553af | 8256 | /* The nonzero bits are in two classes: any bits within MODE |
230d793d | 8257 | that aren't in GET_MODE (x) are always significant. The rest of the |
951553af | 8258 | nonzero bits are those that are significant in the operand of |
230d793d RS |
8259 | the shift when shifted the appropriate number of bits. This |
8260 | shows that high-order bits are cleared by the right shift and | |
8261 | low-order bits by left shifts. */ | |
8262 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
8263 | && INTVAL (XEXP (x, 1)) >= 0 | |
5f4f0e22 | 8264 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) |
230d793d RS |
8265 | { |
8266 | enum machine_mode inner_mode = GET_MODE (x); | |
770ae6cc | 8267 | unsigned int width = GET_MODE_BITSIZE (inner_mode); |
230d793d | 8268 | int count = INTVAL (XEXP (x, 1)); |
5f4f0e22 | 8269 | unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode); |
951553af RK |
8270 | unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode); |
8271 | unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask; | |
5f4f0e22 | 8272 | unsigned HOST_WIDE_INT outer = 0; |
230d793d RS |
8273 | |
8274 | if (mode_width > width) | |
663522cb | 8275 | outer = (op_nonzero & nonzero & ~mode_mask); |
230d793d RS |
8276 | |
8277 | if (code == LSHIFTRT) | |
8278 | inner >>= count; | |
8279 | else if (code == ASHIFTRT) | |
8280 | { | |
8281 | inner >>= count; | |
8282 | ||
951553af | 8283 | /* If the sign bit may have been nonzero before the shift, we |
230d793d | 8284 | need to mark all the places it could have been copied to |
951553af | 8285 | by the shift as possibly nonzero. */ |
5f4f0e22 CH |
8286 | if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count))) |
8287 | inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count); | |
230d793d | 8288 | } |
45620ed4 | 8289 | else if (code == ASHIFT) |
230d793d RS |
8290 | inner <<= count; |
8291 | else | |
8292 | inner = ((inner << (count % width) | |
8293 | | (inner >> (width - (count % width)))) & mode_mask); | |
8294 | ||
951553af | 8295 | nonzero &= (outer | inner); |
230d793d RS |
8296 | } |
8297 | break; | |
8298 | ||
8299 | case FFS: | |
8300 | /* This is at most the number of bits in the mode. */ | |
951553af | 8301 | nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1; |
230d793d | 8302 | break; |
d0ab8cd3 RK |
8303 | |
8304 | case IF_THEN_ELSE: | |
951553af RK |
8305 | nonzero &= (nonzero_bits (XEXP (x, 1), mode) |
8306 | | nonzero_bits (XEXP (x, 2), mode)); | |
d0ab8cd3 | 8307 | break; |
663522cb | 8308 | |
e9a25f70 JL |
8309 | default: |
8310 | break; | |
230d793d RS |
8311 | } |
8312 | ||
951553af | 8313 | return nonzero; |
230d793d | 8314 | } |
b3728b0e JW |
8315 | |
8316 | /* See the macro definition above. */ | |
8317 | #undef num_sign_bit_copies | |
230d793d | 8318 | \f |
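The width and trailing-zero rules used for PLUS and MULT above can be exercised directly. A standalone sketch (plain C, not combine.c code): for operand masks 0x0c (width 4, 2 low zeros) and 0x30 (width 6, 4 low zeros), every pair of values fitting those masks obeys the predicted result_width and result_low bounds.

#include <assert.h>

int
main (void)
{
  unsigned int x, y;
  for (x = 0; x < 256; x++)
    for (y = 0; y < 256; y++)
      {
        if ((x & ~0x0cu) != 0 || (y & ~0x30u) != 0)
          continue;            /* keep values inside the two masks */
        /* PLUS: result_width = MAX (4, 6) + 1 = 7,
           result_low = MIN (2, 4) = 2.  */
        assert (x + y < (1u << 7) && (x + y) % (1u << 2) == 0);
        /* MULT: result_width = 4 + 6 = 10,
           result_low = 2 + 4 = 6.  */
        assert (x * y < (1u << 10) && (x * y) % (1u << 6) == 0);
      }
  return 0;
}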
d0ab8cd3 | 8319 | /* Return the number of bits at the high-order end of X that are known to |
5109d49f RK |
8320 | be equal to the sign bit. X will be used in mode MODE; if MODE is |
8321 | VOIDmode, X will be used in its own mode. The returned value will always | |
8322 | be between 1 and the number of bits in MODE. */ | |
d0ab8cd3 | 8323 | |
770ae6cc | 8324 | static unsigned int |
d0ab8cd3 RK |
8325 | num_sign_bit_copies (x, mode) |
8326 | rtx x; | |
8327 | enum machine_mode mode; | |
8328 | { | |
8329 | enum rtx_code code = GET_CODE (x); | |
770ae6cc | 8330 | unsigned int bitwidth; |
d0ab8cd3 | 8331 | int num0, num1, result; |
951553af | 8332 | unsigned HOST_WIDE_INT nonzero; |
d0ab8cd3 RK |
8333 | rtx tem; |
8334 | ||
8335 | /* If we weren't given a mode, use the mode of X. If the mode is still | |
1c75dfa4 RK |
8336 | VOIDmode, we don't know anything. Likewise if one of the modes is |
8337 | floating-point. */ | |
d0ab8cd3 RK |
8338 | |
8339 | if (mode == VOIDmode) | |
8340 | mode = GET_MODE (x); | |
8341 | ||
1c75dfa4 | 8342 | if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))) |
6752e8d2 | 8343 | return 1; |
d0ab8cd3 RK |
8344 | |
8345 | bitwidth = GET_MODE_BITSIZE (mode); | |
8346 | ||
0f41302f | 8347 | /* For a smaller object, just ignore the high bits. */ |
312def2e | 8348 | if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x))) |
770ae6cc RK |
8349 | { |
8350 | num0 = num_sign_bit_copies (x, GET_MODE (x)); | |
8351 | return MAX (1, | |
8352 | num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)); | |
8353 | } | |
663522cb | 8354 | |
e9a25f70 JL |
8355 | if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x))) |
8356 | { | |
0c314d1a RK |
8357 | #ifndef WORD_REGISTER_OPERATIONS |
8358 | /* If this machine does not do all register operations on the entire | |
8359 | register and MODE is wider than the mode of X, we can say nothing | |
8360 | at all about the high-order bits. */ | |
e9a25f70 JL |
8361 | return 1; |
8362 | #else | |
8363 | /* Likewise on machines that do, if the mode of the object is smaller | |
8364 | than a word and loads of that size don't sign extend, we can say | |
8365 | nothing about the high order bits. */ | |
8366 | if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD | |
8367 | #ifdef LOAD_EXTEND_OP | |
8368 | && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND | |
8369 | #endif | |
8370 | ) | |
8371 | return 1; | |
0c314d1a | 8372 | #endif |
e9a25f70 | 8373 | } |
0c314d1a | 8374 | |
d0ab8cd3 RK |
8375 | switch (code) |
8376 | { | |
8377 | case REG: | |
55310dad | 8378 | |
ff0dbdd1 RK |
8379 | #ifdef POINTERS_EXTEND_UNSIGNED |
8380 | /* If pointers extend signed and this is a pointer in Pmode, say that | |
8381 | all the bits above ptr_mode are known to be sign bit copies. */ | |
8382 | if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode | |
8383 | && REGNO_POINTER_FLAG (REGNO (x))) | |
8384 | return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1; | |
8385 | #endif | |
8386 | ||
55310dad RK |
8387 | if (reg_last_set_value[REGNO (x)] != 0 |
8388 | && reg_last_set_mode[REGNO (x)] == mode | |
57cf50a4 GRK |
8389 | && (reg_last_set_label[REGNO (x)] == label_tick |
8390 | || (REGNO (x) >= FIRST_PSEUDO_REGISTER | |
8391 | && REG_N_SETS (REGNO (x)) == 1 | |
8392 | && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, | |
8393 | REGNO (x)))) | |
55310dad RK |
8394 | && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid) |
8395 | return reg_last_set_sign_bit_copies[REGNO (x)]; | |
d0ab8cd3 | 8396 | |
663522cb | 8397 | tem = get_last_value (x); |
d0ab8cd3 RK |
8398 | if (tem != 0) |
8399 | return num_sign_bit_copies (tem, mode); | |
55310dad RK |
8400 | |
8401 | if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0) | |
8402 | return reg_sign_bit_copies[REGNO (x)]; | |
d0ab8cd3 RK |
8403 | break; |
8404 | ||
457816e2 | 8405 | case MEM: |
8baf60bb | 8406 | #ifdef LOAD_EXTEND_OP |
457816e2 | 8407 | /* Some RISC machines sign-extend all loads smaller than a word. */ | |
8baf60bb | 8408 | if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND) |
770ae6cc RK |
8409 | return MAX (1, ((int) bitwidth |
8410 | - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1)); | |
457816e2 | 8411 | #endif |
8baf60bb | 8412 | break; |
457816e2 | 8413 | |
d0ab8cd3 RK |
8414 | case CONST_INT: |
8415 | /* If the constant is negative, take its 1's complement and remask. | |
8416 | Then see how many zero bits we have. */ | |
951553af | 8417 | nonzero = INTVAL (x) & GET_MODE_MASK (mode); |
ac49a949 | 8418 | if (bitwidth <= HOST_BITS_PER_WIDE_INT |
951553af | 8419 | && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
663522cb | 8420 | nonzero = (~nonzero) & GET_MODE_MASK (mode); |
d0ab8cd3 | 8421 | |
951553af | 8422 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); |
d0ab8cd3 RK |
8423 | |
8424 | case SUBREG: | |
c3c2cb37 RK |
8425 | /* If this is a SUBREG for a promoted object that is sign-extended |
8426 | and we are looking at it in a wider mode, we know that at least the | |
8427 | high-order bits are sign bit copies. */ | |
8428 | ||
8429 | if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x)) | |
770ae6cc RK |
8430 | { |
8431 | num0 = num_sign_bit_copies (SUBREG_REG (x), mode); | |
8432 | return MAX ((int) bitwidth | |
8433 | - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1, | |
8434 | num0); | |
8435 | } | |
663522cb | 8436 | |
0f41302f | 8437 | /* For a smaller object, just ignore the high bits. */ |
d0ab8cd3 RK |
8438 | if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))) |
8439 | { | |
8440 | num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode); | |
8441 | return MAX (1, (num0 | |
770ae6cc RK |
8442 | - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) |
8443 | - bitwidth))); | |
d0ab8cd3 | 8444 | } |
457816e2 | 8445 | |
8baf60bb | 8446 | #ifdef WORD_REGISTER_OPERATIONS |
2aec5b7a | 8447 | #ifdef LOAD_EXTEND_OP |
8baf60bb RK |
8448 | /* For paradoxical SUBREGs on machines where all register operations |
8449 | affect the entire register, just look inside. Note that we are | |
8450 | passing MODE to the recursive call, so the number of sign bit copies | |
8451 | will remain relative to that mode, not the inner mode. */ | |
457816e2 | 8452 | |
2aec5b7a JW |
8453 | /* This works only if loads sign extend. Otherwise, if we get a |
8454 | reload for the inner part, it may be loaded from the stack, and | |
8455 | then we lose all sign bit copies that existed before the store | |
8456 | to the stack. */ | |
8457 | ||
8458 | if ((GET_MODE_SIZE (GET_MODE (x)) | |
8459 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
8460 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND) | |
457816e2 | 8461 | return num_sign_bit_copies (SUBREG_REG (x), mode); |
2aec5b7a | 8462 | #endif |
457816e2 | 8463 | #endif |
d0ab8cd3 RK |
8464 | break; |
8465 | ||
8466 | case SIGN_EXTRACT: | |
8467 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) | |
770ae6cc | 8468 | return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1))); |
d0ab8cd3 RK |
8469 | break; |
8470 | ||
663522cb | 8471 | case SIGN_EXTEND: |
d0ab8cd3 RK |
8472 | return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) |
8473 | + num_sign_bit_copies (XEXP (x, 0), VOIDmode)); | |
8474 | ||
8475 | case TRUNCATE: | |
0f41302f | 8476 | /* For a smaller object, just ignore the high bits. */ |
d0ab8cd3 | 8477 | num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode); |
770ae6cc RK |
8478 | return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) |
8479 | - bitwidth))); | |
d0ab8cd3 RK |
8480 | |
8481 | case NOT: | |
8482 | return num_sign_bit_copies (XEXP (x, 0), mode); | |
8483 | ||
8484 | case ROTATE: case ROTATERT: | |
8485 | /* If we are rotating left by a number of bits less than the number | |
8486 | of sign bit copies, we can just subtract that amount from the | |
8487 | number. */ | |
8488 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
8489 | && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth) | |
8490 | { | |
8491 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8492 | return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1)) | |
770ae6cc | 8493 | : (int) bitwidth - INTVAL (XEXP (x, 1)))); |
d0ab8cd3 RK |
8494 | } |
8495 | break; | |
8496 | ||
8497 | case NEG: | |
8498 | /* In general, this subtracts one sign bit copy. But if the value | |
8499 | is known to be positive, the number of sign bit copies is the | |
951553af RK |
8500 | same as that of the input. Finally, if the input has just one bit |
8501 | that might be nonzero, all the bits are copies of the sign bit. */ | |
70186b34 BS |
8502 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); |
8503 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
8504 | return num0 > 1 ? num0 - 1 : 1; | |
8505 | ||
951553af RK |
8506 | nonzero = nonzero_bits (XEXP (x, 0), mode); |
8507 | if (nonzero == 1) | |
d0ab8cd3 RK |
8508 | return bitwidth; |
8509 | ||
d0ab8cd3 | 8510 | if (num0 > 1 |
951553af | 8511 | && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero)) |
d0ab8cd3 RK |
8512 | num0--; |
8513 | ||
8514 | return num0; | |
8515 | ||
8516 | case IOR: case AND: case XOR: | |
8517 | case SMIN: case SMAX: case UMIN: case UMAX: | |
8518 | /* Logical operations will preserve the number of sign-bit copies. | |
8519 | MIN and MAX operations always return one of the operands. */ | |
8520 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8521 | num1 = num_sign_bit_copies (XEXP (x, 1), mode); | |
8522 | return MIN (num0, num1); | |
8523 | ||
8524 | case PLUS: case MINUS: | |
8525 | /* For addition and subtraction, we can have a 1-bit carry. However, | |
8526 | if we are subtracting 1 from a positive number, there will not | |
8527 | be such a carry. Furthermore, if the positive number is known to | |
8528 | be 0 or 1, we know the result is either -1 or 0. */ | |
8529 | ||
3e3ea975 | 8530 | if (code == PLUS && XEXP (x, 1) == constm1_rtx |
9295e6af | 8531 | && bitwidth <= HOST_BITS_PER_WIDE_INT) |
d0ab8cd3 | 8532 | { |
951553af RK |
8533 | nonzero = nonzero_bits (XEXP (x, 0), mode); |
8534 | if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0) | |
8535 | return (nonzero == 1 || nonzero == 0 ? bitwidth | |
8536 | : bitwidth - floor_log2 (nonzero) - 1); | |
d0ab8cd3 RK |
8537 | } |
8538 | ||
8539 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8540 | num1 = num_sign_bit_copies (XEXP (x, 1), mode); | |
8541 | return MAX (1, MIN (num0, num1) - 1); | |
663522cb | 8542 | |
d0ab8cd3 RK |
8543 | case MULT: |
8544 | /* The number of bits of the product is the sum of the number of | |
8545 | bits of both terms. However, unless one of the terms is known | |
8546 | to be positive, we must allow for an additional bit since negating | |
8547 | a negative number can remove one sign bit copy. */ | |
8548 | ||
8549 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8550 | num1 = num_sign_bit_copies (XEXP (x, 1), mode); | |
8551 | ||
8552 | result = bitwidth - (bitwidth - num0) - (bitwidth - num1); | |
8553 | if (result > 0 | |
70186b34 BS |
8554 | && (bitwidth > HOST_BITS_PER_WIDE_INT |
8555 | || (((nonzero_bits (XEXP (x, 0), mode) | |
8556 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
8557 | && ((nonzero_bits (XEXP (x, 1), mode) | |
8558 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)))) | |
d0ab8cd3 RK |
8559 | result--; |
8560 | ||
8561 | return MAX (1, result); | |
8562 | ||
8563 | case UDIV: | |
70186b34 BS |
8564 | /* The result must be <= the first operand. If the first operand |
8565 | has the high bit set, we know nothing about the number of sign | |
8566 | bit copies. */ | |
8567 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
8568 | return 1; | |
8569 | else if ((nonzero_bits (XEXP (x, 0), mode) | |
8570 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
8571 | return 1; | |
8572 | else | |
8573 | return num_sign_bit_copies (XEXP (x, 0), mode); | |
663522cb | 8574 | |
d0ab8cd3 RK |
8575 | case UMOD: |
8576 | /* The result must be <= the second operand. */ | |
8577 | return num_sign_bit_copies (XEXP (x, 1), mode); | |
8578 | ||
8579 | case DIV: | |
8580 | /* Similar to unsigned division, except that we have to worry about | |
8581 | the case where the divisor is negative, in which case we have | |
8582 | to add 1. */ | |
8583 | result = num_sign_bit_copies (XEXP (x, 0), mode); | |
8584 | if (result > 1 | |
70186b34 BS |
8585 | && (bitwidth > HOST_BITS_PER_WIDE_INT |
8586 | || (nonzero_bits (XEXP (x, 1), mode) | |
8587 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) | |
8588 | result--; | |
d0ab8cd3 RK |
8589 | |
8590 | return result; | |
8591 | ||
8592 | case MOD: | |
8593 | result = num_sign_bit_copies (XEXP (x, 1), mode); | |
8594 | if (result > 1 | |
70186b34 BS |
8595 | && (bitwidth > HOST_BITS_PER_WIDE_INT |
8596 | || (nonzero_bits (XEXP (x, 1), mode) | |
8597 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) | |
8598 | result--; | |
d0ab8cd3 RK |
8599 | |
8600 | return result; | |
8601 | ||
8602 | case ASHIFTRT: | |
8603 | /* Shifts by a constant add to the number of bits equal to the | |
8604 | sign bit. */ | |
8605 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8606 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
8607 | && INTVAL (XEXP (x, 1)) > 0) | |
8608 | num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1))); | |
8609 | ||
8610 | return num0; | |
8611 | ||
8612 | case ASHIFT: | |
d0ab8cd3 RK |
8613 | /* Left shifts destroy copies. */ |
8614 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
8615 | || INTVAL (XEXP (x, 1)) < 0 | |
8616 | || INTVAL (XEXP (x, 1)) >= bitwidth) | |
8617 | return 1; | |
8618 | ||
8619 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8620 | return MAX (1, num0 - INTVAL (XEXP (x, 1))); | |
8621 | ||
8622 | case IF_THEN_ELSE: | |
8623 | num0 = num_sign_bit_copies (XEXP (x, 1), mode); | |
8624 | num1 = num_sign_bit_copies (XEXP (x, 2), mode); | |
8625 | return MIN (num0, num1); | |
8626 | ||
d0ab8cd3 RK |
8627 | case EQ: case NE: case GE: case GT: case LE: case LT: |
8628 | case GEU: case GTU: case LEU: case LTU: | |
0802d516 RK |
8629 | if (STORE_FLAG_VALUE == -1) |
8630 | return bitwidth; | |
e9a25f70 | 8631 | break; |
663522cb | 8632 | |
e9a25f70 JL |
8633 | default: |
8634 | break; | |
d0ab8cd3 RK |
8635 | } |
8636 | ||
8637 | /* If we haven't been able to figure it out by one of the above rules, | |
8638 | see if some of the high-order bits are known to be zero. If so, | |
ac49a949 RS |
8639 | count those bits and return one less than that amount. If we can't |
8640 | safely compute the mask for this mode, always return BITWIDTH. */ | |
8641 | ||
8642 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
6752e8d2 | 8643 | return 1; |
d0ab8cd3 | 8644 | |
951553af | 8645 | nonzero = nonzero_bits (x, mode); |
df6f4086 | 8646 | return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) |
951553af | 8647 | ? 1 : bitwidth - floor_log2 (nonzero) - 1); |
d0ab8cd3 RK |
8648 | } |
8649 | \f | |
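The CONST_INT rule above (take the 1's complement of a negative constant, then count the leading zero bits) is easy to check in isolation. A standalone sketch for a hypothetical 8-bit mode (plain C, not combine.c code):

#include <assert.h>

static int
floor_log2_u (unsigned int x)       /* -1 when x == 0 */
{
  int l = -1;
  while (x)
    x >>= 1, l++;
  return l;
}

static int
const_sign_bit_copies (int value)
{
  int bitwidth = 8;                 /* pretend the mode is 8 bits */
  unsigned int nonzero = value & 0xff;

  if (nonzero & 0x80)               /* negative: 1's complement */
    nonzero = ~nonzero & 0xff;
  return nonzero == 0 ? bitwidth
                      : bitwidth - floor_log2_u (nonzero) - 1;
}

int
main (void)
{
  assert (const_sign_bit_copies (0) == 8);      /* 0000 0000 */
  assert (const_sign_bit_copies (-1) == 8);     /* 1111 1111 */
  assert (const_sign_bit_copies (1) == 7);      /* 0000 0001 */
  assert (const_sign_bit_copies (-2) == 7);     /* 1111 1110 */
  assert (const_sign_bit_copies (0x40) == 1);   /* 0100 0000 */
  return 0;
}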
1a26b032 RK |
8650 | /* Return the number of "extended" bits there are in X, when interpreted |
8651 | as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For | |
8652 | unsigned quantities, this is the number of high-order zero bits. | |
8653 | For signed quantities, this is the number of copies of the sign bit | |
8654 | minus 1. In both cases, this function returns the number of "spare" | |
8655 | bits. For example, if two quantities for which this function returns | |
8656 | at least 1 are added, the addition is known not to overflow. | |
8657 | ||
8658 | This function will always return 0 unless called during combine, which | |
8659 | implies that it must be called from a define_split. */ | |
8660 | ||
770ae6cc | 8661 | unsigned int |
1a26b032 RK |
8662 | extended_count (x, mode, unsignedp) |
8663 | rtx x; | |
8664 | enum machine_mode mode; | |
8665 | int unsignedp; | |
8666 | { | |
951553af | 8667 | if (nonzero_sign_valid == 0) |
1a26b032 RK |
8668 | return 0; |
8669 | ||
8670 | return (unsignedp | |
ac49a949 | 8671 | ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
770ae6cc RK |
8672 | ? (GET_MODE_BITSIZE (mode) - 1 |
8673 | - floor_log2 (nonzero_bits (x, mode))) | |
8674 | : 0) | |
1a26b032 RK |
8675 | : num_sign_bit_copies (x, mode) - 1); |
8676 | } | |
8677 | \f | |
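The "spare" bits claim above can be checked directly: in a hypothetical 8-bit signed mode, a value for which extended_count returns at least 1 fits in 7 bits, i.e. lies in [-64, 63], and no sum of two such values can leave the 8-bit signed range. A standalone sketch (plain C, not combine.c code):

#include <assert.h>

int
main (void)
{
  int a, b;
  for (a = -64; a <= 63; a++)
    for (b = -64; b <= 63; b++)
      assert (a + b >= -128 && a + b <= 127);
  return 0;
}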
230d793d RS |
8678 | /* This function is called from `simplify_shift_const' to merge two |
8679 | outer operations. Specifically, we have already found that we need | |
8680 | to perform operation *POP0 with constant *PCONST0 at the outermost | |
8681 | position. We would now like to also perform OP1 with constant CONST1 | |
8682 | (with *POP0 being done last). | |
8683 | ||
8684 | Return 1 if we can do the operation and update *POP0 and *PCONST0 with | |
663522cb | 8685 | the resulting operation. *PCOMP_P is set to 1 if we would need to |
230d793d RS |
8686 | complement the innermost operand, otherwise it is unchanged. |
8687 | ||
8688 | MODE is the mode in which the operation will be done. No bits outside | |
8689 | the width of this mode matter. It is assumed that the width of this mode | |
5f4f0e22 | 8690 | is smaller than or equal to HOST_BITS_PER_WIDE_INT. |
230d793d RS |
8691 | |
8692 | If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS, | |
8693 | IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper | |
8694 | result is simply *PCONST0. | |
8695 | ||
8696 | If the resulting operation cannot be expressed as one operation, we | |
8697 | return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */ | |
8698 | ||
8699 | static int | |
8700 | merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p) | |
8701 | enum rtx_code *pop0; | |
5f4f0e22 | 8702 | HOST_WIDE_INT *pconst0; |
230d793d | 8703 | enum rtx_code op1; |
5f4f0e22 | 8704 | HOST_WIDE_INT const1; |
230d793d RS |
8705 | enum machine_mode mode; |
8706 | int *pcomp_p; | |
8707 | { | |
8708 | enum rtx_code op0 = *pop0; | |
5f4f0e22 | 8709 | HOST_WIDE_INT const0 = *pconst0; |
230d793d RS |
8710 | |
8711 | const0 &= GET_MODE_MASK (mode); | |
8712 | const1 &= GET_MODE_MASK (mode); | |
8713 | ||
8714 | /* If OP0 is an AND, clear unimportant bits in CONST1. */ | |
8715 | if (op0 == AND) | |
8716 | const1 &= const0; | |
8717 | ||
8718 | /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or | |
8719 | if OP0 is SET. */ | |
8720 | ||
8721 | if (op1 == NIL || op0 == SET) | |
8722 | return 1; | |
8723 | ||
8724 | else if (op0 == NIL) | |
8725 | op0 = op1, const0 = const1; | |
8726 | ||
8727 | else if (op0 == op1) | |
8728 | { | |
8729 | switch (op0) | |
8730 | { | |
8731 | case AND: | |
8732 | const0 &= const1; | |
8733 | break; | |
8734 | case IOR: | |
8735 | const0 |= const1; | |
8736 | break; | |
8737 | case XOR: | |
8738 | const0 ^= const1; | |
8739 | break; | |
8740 | case PLUS: | |
8741 | const0 += const1; | |
8742 | break; | |
8743 | case NEG: | |
8744 | op0 = NIL; | |
8745 | break; | |
e9a25f70 JL |
8746 | default: |
8747 | break; | |
230d793d RS |
8748 | } |
8749 | } | |
8750 | ||
8751 | /* Otherwise, if either is a PLUS or NEG, we can't do anything. */ | |
8752 | else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG) | |
8753 | return 0; | |
8754 | ||
8755 | /* If the two constants aren't the same, we can't do anything. The | |
8756 | remaining six cases can all be done. */ | |
8757 | else if (const0 != const1) | |
8758 | return 0; | |
8759 | ||
8760 | else | |
8761 | switch (op0) | |
8762 | { | |
8763 | case IOR: | |
8764 | if (op1 == AND) | |
8765 | /* (a & b) | b == b */ | |
8766 | op0 = SET; | |
8767 | else /* op1 == XOR */ | |
8768 | /* (a ^ b) | b == a | b */ | |
b729186a | 8769 | {;} |
230d793d RS |
8770 | break; |
8771 | ||
8772 | case XOR: | |
8773 | if (op1 == AND) | |
8774 | /* (a & b) ^ b == (~a) & b */ | |
8775 | op0 = AND, *pcomp_p = 1; | |
8776 | else /* op1 == IOR */ | |
8777 | /* (a | b) ^ b == a & ~b */ | |
663522cb | 8778 | op0 = AND, *pconst0 = ~const0; |
230d793d RS |
8779 | break; |
8780 | ||
8781 | case AND: | |
8782 | if (op1 == IOR) | |
8783 | /* (a | b) & b == b */ | |
8784 | op0 = SET; | |
8785 | else /* op1 == XOR */ | |
8786 | /* (a ^ b) & b == (~a) & b */ | |
8787 | *pcomp_p = 1; | |
8788 | break; | |
e9a25f70 JL |
8789 | default: |
8790 | break; | |
230d793d RS |
8791 | } |
8792 | ||
8793 | /* Check for NO-OP cases. */ | |
8794 | const0 &= GET_MODE_MASK (mode); | |
8795 | if (const0 == 0 | |
8796 | && (op0 == IOR || op0 == XOR || op0 == PLUS)) | |
8797 | op0 = NIL; | |
8798 | else if (const0 == 0 && op0 == AND) | |
8799 | op0 = SET; | |
e51712db KG |
8800 | else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode) |
8801 | && op0 == AND) | |
230d793d RS |
8802 | op0 = NIL; |
8803 | ||
7e4ce834 RH |
8804 | /* ??? Slightly redundant with the above mask, but not entirely. |
8805 | Moving this above means we'd have to sign-extend the mode mask | |
8806 | for the final test. */ | |
8807 | const0 = trunc_int_for_mode (const0, mode); | |
9fa6d012 | 8808 | |
230d793d RS |
8809 | *pop0 = op0; |
8810 | *pconst0 = const0; | |
8811 | ||
8812 | return 1; | |
8813 | } | |
8814 | \f | |
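The six mixed-operation cases in merge_outer_ops rest on the identities quoted in its comments; since they hold bitwise, an exhaustive check over 8-bit values covers them all. A standalone sketch (plain C, not combine.c code):

#include <assert.h>

int
main (void)
{
  unsigned int a, b, m = 0xff;
  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      {
        assert (((a & b) | b) == b);                     /* IOR of AND */
        assert (((a ^ b) | b) == (a | b));               /* IOR of XOR */
        assert ((((a & b) ^ b) & m) == ((~a & b) & m));  /* XOR of AND */
        assert ((((a | b) ^ b) & m) == ((a & ~b) & m));  /* XOR of IOR */
        assert (((a | b) & b) == b);                     /* AND of IOR */
        assert ((((a ^ b) & b) & m) == ((~a & b) & m));  /* AND of XOR */
      }
  return 0;
}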
8815 | /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift. | |
8816 | The result of the shift is RESULT_MODE. X, if non-zero, is an expression | |
8817 | that we started with. | |
8818 | ||
8819 | The shift is normally computed in the widest mode we find in VAROP, as | |
8820 | long as it isn't a different number of words than RESULT_MODE. Exceptions | |
8821 | are ASHIFTRT and ROTATE, which are always done in their original mode. */ | |
8822 | ||
8823 | static rtx | |
770ae6cc | 8824 | simplify_shift_const (x, code, result_mode, varop, input_count) |
230d793d RS |
8825 | rtx x; |
8826 | enum rtx_code code; | |
8827 | enum machine_mode result_mode; | |
8828 | rtx varop; | |
770ae6cc | 8829 | int input_count; |
230d793d RS |
8830 | { |
8831 | enum rtx_code orig_code = code; | |
770ae6cc RK |
8832 | int orig_count = input_count; |
8833 | unsigned int count; | |
8834 | int signed_count; | |
230d793d RS |
8835 | enum machine_mode mode = result_mode; |
8836 | enum machine_mode shift_mode, tmode; | |
770ae6cc | 8837 | unsigned int mode_words |
230d793d RS |
8838 | = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; |
8839 | /* We form (outer_op (code varop count) (outer_const)). */ | |
8840 | enum rtx_code outer_op = NIL; | |
c4e861e8 | 8841 | HOST_WIDE_INT outer_const = 0; |
230d793d RS |
8842 | rtx const_rtx; |
8843 | int complement_p = 0; | |
8844 | rtx new; | |
8845 | ||
8846 | /* If we were given an invalid count, don't do anything except exactly | |
8847 | what was requested. */ | |
8848 | ||
770ae6cc | 8849 | if (input_count < 0 || input_count > (int) GET_MODE_BITSIZE (mode)) |
230d793d RS |
8850 | { |
8851 | if (x) | |
8852 | return x; | |
8853 | ||
770ae6cc | 8854 | return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (input_count)); |
230d793d RS |
8855 | } |
8856 | ||
770ae6cc RK |
8857 | count = input_count; |
8858 | ||
230d793d RS |
8859 | /* Unless one of the branches of the `if' in this loop does a `continue', |
8860 | we will `break' the loop after the `if'. */ | |
8861 | ||
8862 | while (count != 0) | |
8863 | { | |
8864 | /* If we have an operand of (clobber (const_int 0)), just return that | |
8865 | value. */ | |
8866 | if (GET_CODE (varop) == CLOBBER) | |
8867 | return varop; | |
8868 | ||
8869 | /* If we discovered we had to complement VAROP, leave. Making a NOT | |
8870 | here would cause an infinite loop. */ | |
8871 | if (complement_p) | |
8872 | break; | |
8873 | ||
abc95ed3 | 8874 | /* Convert ROTATERT to ROTATE. */ |
230d793d RS |
8875 | if (code == ROTATERT) |
8876 | code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count; | |
8877 | ||
230d793d | 8878 | /* We need to determine what mode we will do the shift in. If the |
f6789c77 RK |
8879 | shift is a right shift or a ROTATE, we must always do it in the mode |
8880 | it was originally done in. Otherwise, we can do it in MODE, the | |
0f41302f | 8881 | widest mode encountered. */ |
f6789c77 RK |
8882 | shift_mode |
8883 | = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE | |
8884 | ? result_mode : mode); | |
230d793d RS |
8885 | |
8886 | /* Handle cases where the count is greater than the size of the mode | |
8887 | minus 1. For ASHIFT, use the size minus one as the count (this can | |
8888 | occur when simplifying (lshiftrt (ashiftrt ..))). For rotates, | |
8889 | take the count modulo the size. For other shifts, the result is | |
8890 | zero. | |
8891 | ||
8892 | Since these shifts are being produced by the compiler by combining | |
8893 | multiple operations, each of which is defined, we know what the | 
8894 | result is supposed to be. */ | |
663522cb | 8895 | |
230d793d RS |
8896 | if (count > GET_MODE_BITSIZE (shift_mode) - 1) |
8897 | { | |
8898 | if (code == ASHIFTRT) | |
8899 | count = GET_MODE_BITSIZE (shift_mode) - 1; | |
8900 | else if (code == ROTATE || code == ROTATERT) | |
8901 | count %= GET_MODE_BITSIZE (shift_mode); | |
8902 | else | |
8903 | { | |
8904 | /* We can't simply return zero because there may be an | |
8905 | outer op. */ | |
8906 | varop = const0_rtx; | |
8907 | count = 0; | |
8908 | break; | |
8909 | } | |
8910 | } | |
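/* Illustration (not part of combine.c): why an over-wide count can be
   canonicalized as above.  For 32-bit values, rotating by 35 equals
   rotating by 35 % 32 = 3, and an arithmetic right shift by the width
   minus one leaves only copies of the sign bit.  Standalone sketch;
   rotl is a hypothetical helper, and sign-propagating >> on a negative
   int is assumed (true for GCC targets).  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t
rotl (uint32_t x, unsigned int n)
{
  n %= 32;				/* rotates compose modulo the width */
  return n ? (x << n) | (x >> (32 - n)) : x;
}

int
main (void)
{
  int32_t v = -12345;

  assert (rotl (0xdeadbeefu, 35) == rotl (0xdeadbeefu, 3));
  assert ((v >> 31) == -1);		/* all bits are sign-bit copies */
  return 0;
}
#endif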
8911 | ||
312def2e RK |
8912 | /* An arithmetic right shift of a quantity known to be -1 or 0 |
8913 | is a no-op. */ | |
8914 | if (code == ASHIFTRT | |
8915 | && (num_sign_bit_copies (varop, shift_mode) | |
8916 | == GET_MODE_BITSIZE (shift_mode))) | |
d0ab8cd3 | 8917 | { |
312def2e RK |
8918 | count = 0; |
8919 | break; | |
8920 | } | |
d0ab8cd3 | 8921 | |
312def2e RK |
8922 | /* If we are doing an arithmetic right shift and discarding all but |
8923 | the sign bit copies, this is equivalent to doing a shift by the | |
8924 | bitsize minus one. Convert it into that shift because it will often | |
8925 | allow other simplifications. */ | |
500c518b | 8926 | |
312def2e RK |
8927 | if (code == ASHIFTRT |
8928 | && (count + num_sign_bit_copies (varop, shift_mode) | |
8929 | >= GET_MODE_BITSIZE (shift_mode))) | |
8930 | count = GET_MODE_BITSIZE (shift_mode) - 1; | |
500c518b | 8931 | |
230d793d RS |
8932 | /* We simplify the tests below and elsewhere by converting |
8933 | ASHIFTRT to LSHIFTRT if we know the sign bit is clear. | |
8934 | `make_compound_operation' will convert it to an ASHIFTRT for | 
8935 | those machines (such as Vax) that don't have an LSHIFTRT. */ | 
5f4f0e22 | 8936 | if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT |
230d793d | 8937 | && code == ASHIFTRT |
951553af | 8938 | && ((nonzero_bits (varop, shift_mode) |
5f4f0e22 CH |
8939 | & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1))) |
8940 | == 0)) | |
230d793d RS |
8941 | code = LSHIFTRT; |
8942 | ||
8943 | switch (GET_CODE (varop)) | |
8944 | { | |
8945 | case SIGN_EXTEND: | |
8946 | case ZERO_EXTEND: | |
8947 | case SIGN_EXTRACT: | |
8948 | case ZERO_EXTRACT: | |
8949 | new = expand_compound_operation (varop); | |
8950 | if (new != varop) | |
8951 | { | |
8952 | varop = new; | |
8953 | continue; | |
8954 | } | |
8955 | break; | |
8956 | ||
8957 | case MEM: | |
8958 | /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH | |
8959 | minus the width of a smaller mode, we can do this with a | |
8960 | SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */ | |
8961 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
8962 | && ! mode_dependent_address_p (XEXP (varop, 0)) | |
8963 | && ! MEM_VOLATILE_P (varop) | |
8964 | && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count, | |
8965 | MODE_INT, 1)) != BLKmode) | |
8966 | { | |
f76b9db2 | 8967 | if (BYTES_BIG_ENDIAN) |
38a448ca | 8968 | new = gen_rtx_MEM (tmode, XEXP (varop, 0)); |
f76b9db2 | 8969 | else |
38a448ca RH |
8970 | new = gen_rtx_MEM (tmode, |
8971 | plus_constant (XEXP (varop, 0), | |
8972 | count / BITS_PER_UNIT)); | |
bf49b139 | 8973 | |
c6df88cb | 8974 | MEM_COPY_ATTRIBUTES (new, varop); |
230d793d RS |
8975 | varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND |
8976 | : ZERO_EXTEND, mode, new); | |
8977 | count = 0; | |
8978 | continue; | |
8979 | } | |
8980 | break; | |
8981 | ||
8982 | case USE: | |
8983 | /* Similar to the case above, except that we can only do this if | |
8984 | the resulting mode is the same as that of the underlying | |
8985 | MEM and adjust the address depending on the *bits* endianness | |
8986 | because of the way that bit-field extract insns are defined. */ | |
8987 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
8988 | && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count, | |
8989 | MODE_INT, 1)) != BLKmode | |
8990 | && tmode == GET_MODE (XEXP (varop, 0))) | |
8991 | { | |
f76b9db2 ILT |
8992 | if (BITS_BIG_ENDIAN) |
8993 | new = XEXP (varop, 0); | |
8994 | else | |
8995 | { | |
8996 | new = copy_rtx (XEXP (varop, 0)); | |
663522cb | 8997 | SUBST (XEXP (new, 0), |
f76b9db2 ILT |
8998 | plus_constant (XEXP (new, 0), |
8999 | count / BITS_PER_UNIT)); | |
9000 | } | |
230d793d RS |
9001 | |
9002 | varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND | |
9003 | : ZERO_EXTEND, mode, new); | |
9004 | count = 0; | |
9005 | continue; | |
9006 | } | |
9007 | break; | |
9008 | ||
9009 | case SUBREG: | |
9010 | /* If VAROP is a SUBREG, strip it as long as the inner operand has | |
9011 | the same number of words as what we've seen so far. Then store | |
9012 | the widest mode in MODE. */ | |
f9e67232 RS |
9013 | if (subreg_lowpart_p (varop) |
9014 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))) | |
9015 | > GET_MODE_SIZE (GET_MODE (varop))) | |
230d793d RS |
9016 | && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))) |
9017 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) | |
9018 | == mode_words)) | |
9019 | { | |
9020 | varop = SUBREG_REG (varop); | |
9021 | if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode)) | |
9022 | mode = GET_MODE (varop); | |
9023 | continue; | |
9024 | } | |
9025 | break; | |
9026 | ||
9027 | case MULT: | |
9028 | /* Some machines use MULT instead of ASHIFT because MULT | |
9029 | is cheaper. But it is still better on those machines to | |
9030 | merge two shifts into one. */ | |
9031 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9032 | && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) | |
9033 | { | |
770ae6cc RK |
9034 | varop |
9035 | = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0), | |
9036 | GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1))))); | |
230d793d RS |
9037 | continue; |
9038 | } | |
9039 | break; | |
9040 | ||
9041 | case UDIV: | |
9042 | /* Similar, for when divides are cheaper. */ | |
9043 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9044 | && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) | |
9045 | { | |
770ae6cc RK |
9046 | varop |
9047 | = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0), | |
9048 | GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1))))); | |
230d793d RS |
9049 | continue; |
9050 | } | |
9051 | break; | |
9052 | ||
9053 | case ASHIFTRT: | |
663522cb | 9054 | /* If we are extracting just the sign bit of an arithmetic right |
230d793d RS |
9055 | shift, that shift is not needed. */ |
9056 | if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1) | |
9057 | { | |
9058 | varop = XEXP (varop, 0); | |
9059 | continue; | |
9060 | } | |
9061 | ||
0f41302f | 9062 | /* ... fall through ... */ |
230d793d RS |
9063 | |
9064 | case LSHIFTRT: | |
9065 | case ASHIFT: | |
230d793d RS |
9066 | case ROTATE: |
9067 | /* Here we have two nested shifts. The result is usually the | |
9068 | AND of a new shift with a mask. We compute the result below. */ | |
9069 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9070 | && INTVAL (XEXP (varop, 1)) >= 0 | |
9071 | && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop)) | |
5f4f0e22 CH |
9072 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
9073 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
230d793d RS |
9074 | { |
9075 | enum rtx_code first_code = GET_CODE (varop); | |
770ae6cc | 9076 | unsigned int first_count = INTVAL (XEXP (varop, 1)); |
5f4f0e22 | 9077 | unsigned HOST_WIDE_INT mask; |
230d793d | 9078 | rtx mask_rtx; |
230d793d | 9079 | |
230d793d RS |
9080 | /* We have one common special case. We can't do any merging if |
9081 | the inner code is an ASHIFTRT of a smaller mode. However, if | |
9082 | we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2) | |
9083 | with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2), | |
9084 | we can convert it to | |
9085 | (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1). | 
9086 | This simplifies certain SIGN_EXTEND operations. */ | |
9087 | if (code == ASHIFT && first_code == ASHIFTRT | |
9088 | && (GET_MODE_BITSIZE (result_mode) | |
9089 | - GET_MODE_BITSIZE (GET_MODE (varop))) == count) | |
9090 | { | |
9091 | /* C3 has the low-order C1 bits zero. */ | |
663522cb | 9092 | |
5f4f0e22 | 9093 | mask = (GET_MODE_MASK (mode) |
663522cb | 9094 | & ~(((HOST_WIDE_INT) 1 << first_count) - 1)); |
230d793d | 9095 | |
5f4f0e22 | 9096 | varop = simplify_and_const_int (NULL_RTX, result_mode, |
230d793d | 9097 | XEXP (varop, 0), mask); |
5f4f0e22 | 9098 | varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode, |
230d793d RS |
9099 | varop, count); |
9100 | count = first_count; | |
9101 | code = ASHIFTRT; | |
9102 | continue; | |
9103 | } | |
663522cb | 9104 | |
d0ab8cd3 RK |
9105 | /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more |
9106 | than C1 high-order bits equal to the sign bit, we can convert | |
9107 | this to either an ASHIFT or an ASHIFTRT depending on the | 
663522cb | 9108 | two counts. |
230d793d RS |
9109 | |
9110 | We cannot do this if VAROP's mode is not SHIFT_MODE. */ | |
9111 | ||
9112 | if (code == ASHIFTRT && first_code == ASHIFT | |
9113 | && GET_MODE (varop) == shift_mode | |
d0ab8cd3 RK |
9114 | && (num_sign_bit_copies (XEXP (varop, 0), shift_mode) |
9115 | > first_count)) | |
230d793d | 9116 | { |
d0ab8cd3 | 9117 | varop = XEXP (varop, 0); |
770ae6cc RK |
9118 | |
9119 | signed_count = count - first_count; | |
9120 | if (signed_count < 0) | |
663522cb | 9121 | count = -signed_count, code = ASHIFT; |
770ae6cc RK |
9122 | else |
9123 | count = signed_count; | |
9124 | ||
d0ab8cd3 | 9125 | continue; |
230d793d RS |
9126 | } |
9127 | ||
9128 | /* There are some cases we can't do. If CODE is ASHIFTRT, | |
9129 | we can only do this if FIRST_CODE is also ASHIFTRT. | |
9130 | ||
9131 | We can't do the case when CODE is ROTATE and FIRST_CODE is | |
9132 | ASHIFTRT. | |
9133 | ||
9134 | If the mode of this shift is not the mode of the outer shift, | |
bdaae9a0 | 9135 | we can't do this if either shift is a right shift or ROTATE. |
230d793d RS |
9136 | |
9137 | Finally, we can't do any of these if the mode is too wide | |
9138 | unless the codes are the same. | |
9139 | ||
9140 | Handle the case where the shift codes are the same | |
9141 | first. */ | |
9142 | ||
9143 | if (code == first_code) | |
9144 | { | |
9145 | if (GET_MODE (varop) != result_mode | |
bdaae9a0 RK |
9146 | && (code == ASHIFTRT || code == LSHIFTRT |
9147 | || code == ROTATE)) | |
230d793d RS |
9148 | break; |
9149 | ||
9150 | count += first_count; | |
9151 | varop = XEXP (varop, 0); | |
9152 | continue; | |
9153 | } | |
9154 | ||
9155 | if (code == ASHIFTRT | |
9156 | || (code == ROTATE && first_code == ASHIFTRT) | |
5f4f0e22 | 9157 | || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT |
230d793d | 9158 | || (GET_MODE (varop) != result_mode |
bdaae9a0 RK |
9159 | && (first_code == ASHIFTRT || first_code == LSHIFTRT |
9160 | || first_code == ROTATE | |
230d793d RS |
9161 | || code == ROTATE))) |
9162 | break; | |
9163 | ||
9164 | /* To compute the mask to apply after the shift, shift the | |
663522cb | 9165 | nonzero bits of the inner shift the same way the |
230d793d RS |
9166 | outer shift will. */ |
9167 | ||
951553af | 9168 | mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop))); |
230d793d RS |
9169 | |
9170 | mask_rtx | |
9171 | = simplify_binary_operation (code, result_mode, mask_rtx, | |
5f4f0e22 | 9172 | GEN_INT (count)); |
663522cb | 9173 | |
230d793d RS |
9174 | /* Give up if we can't compute an outer operation to use. */ |
9175 | if (mask_rtx == 0 | |
9176 | || GET_CODE (mask_rtx) != CONST_INT | |
9177 | || ! merge_outer_ops (&outer_op, &outer_const, AND, | |
9178 | INTVAL (mask_rtx), | |
9179 | result_mode, &complement_p)) | |
9180 | break; | |
9181 | ||
9182 | /* If the shifts are in the same direction, we add the | |
9183 | counts. Otherwise, we subtract them. */ | |
770ae6cc | 9184 | signed_count = count; |
230d793d RS |
9185 | if ((code == ASHIFTRT || code == LSHIFTRT) |
9186 | == (first_code == ASHIFTRT || first_code == LSHIFTRT)) | |
770ae6cc | 9187 | signed_count += first_count; |
230d793d | 9188 | else |
770ae6cc | 9189 | signed_count -= first_count; |
230d793d | 9190 | |
663522cb | 9191 | /* If COUNT is positive, the new shift is usually CODE, |
230d793d RS |
9192 | except for the two exceptions below, in which case it is |
9193 | FIRST_CODE. If the count is negative, FIRST_CODE should | |
9194 | always be used.  */ | 
770ae6cc | 9195 | if (signed_count > 0 |
230d793d RS |
9196 | && ((first_code == ROTATE && code == ASHIFT) |
9197 | || (first_code == ASHIFTRT && code == LSHIFTRT))) | |
770ae6cc RK |
9198 | code = first_code, count = signed_count; |
9199 | else if (signed_count < 0) | |
663522cb | 9200 | code = first_code, count = -signed_count; |
770ae6cc RK |
9201 | else |
9202 | count = signed_count; | |
230d793d RS |
9203 | |
9204 | varop = XEXP (varop, 0); | |
9205 | continue; | |
9206 | } | |
9207 | ||
9208 | /* If we have (A << B << C) for any shift, we can convert this to | |
9209 | (A << C << B). This wins if A is a constant. Only try this if | |
9210 | B is not a constant. */ | |
9211 | ||
9212 | else if (GET_CODE (varop) == code | |
9213 | && GET_CODE (XEXP (varop, 1)) != CONST_INT | |
9214 | && 0 != (new | |
9215 | = simplify_binary_operation (code, mode, | |
9216 | XEXP (varop, 0), | |
5f4f0e22 | 9217 | GEN_INT (count)))) |
230d793d RS |
9218 | { |
9219 | varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1)); | |
9220 | count = 0; | |
9221 | continue; | |
9222 | } | |
9223 | break; | |
9224 | ||
9225 | case NOT: | |
9226 | /* Make this fit the case below. */ | |
9227 | varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0), | |
5f4f0e22 | 9228 | GEN_INT (GET_MODE_MASK (mode))); |
230d793d RS |
9229 | continue; |
9230 | ||
9231 | case IOR: | |
9232 | case AND: | |
9233 | case XOR: | |
9234 | /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C) | |
9235 | with C the size of VAROP - 1 and the shift is logical if | |
9236 | STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1, | |
9237 | we have an (le X 0) operation. If we have an arithmetic shift | |
9238 | and STORE_FLAG_VALUE is 1 or we have a logical shift with | |
9239 | STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */ | |
9240 | ||
9241 | if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS | |
9242 | && XEXP (XEXP (varop, 0), 1) == constm1_rtx | |
9243 | && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) | |
9244 | && (code == LSHIFTRT || code == ASHIFTRT) | |
9245 | && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1 | |
9246 | && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) | |
9247 | { | |
9248 | count = 0; | |
9249 | varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1), | |
9250 | const0_rtx); | |
9251 | ||
9252 | if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT) | |
9253 | varop = gen_rtx_combine (NEG, GET_MODE (varop), varop); | |
9254 | ||
9255 | continue; | |
9256 | } | |
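/* Illustration (not part of combine.c): the (ior (plus X -1) X) form
   handled above.  Shifting it right by the width minus one extracts
   the sign bit of (X-1)|X, which is set exactly when X <= 0 as a
   signed value.  Standalone sketch on 32-bit operands.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
check (int32_t x)
{
  uint32_t u = (uint32_t) x;

  assert ((((u - 1) | u) >> 31) == (uint32_t) (x <= 0));
}

int
main (void)
{
  check (INT32_MIN); check (-1); check (0); check (1); check (INT32_MAX);
  return 0;
}
#endif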
9257 | ||
9258 | /* If we have (shift (logical)), move the logical to the outside | |
9259 | to allow it to possibly combine with another logical and the | |
9260 | shift to combine with another shift. This also canonicalizes to | |
9261 | what a ZERO_EXTRACT looks like. Also, some machines have | |
9262 | (and (shift)) insns. */ | |
9263 | ||
9264 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9265 | && (new = simplify_binary_operation (code, result_mode, | |
9266 | XEXP (varop, 1), | |
5f4f0e22 | 9267 | GEN_INT (count))) != 0 |
663522cb | 9268 | && GET_CODE (new) == CONST_INT |
230d793d RS |
9269 | && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop), |
9270 | INTVAL (new), result_mode, &complement_p)) | |
9271 | { | |
9272 | varop = XEXP (varop, 0); | |
9273 | continue; | |
9274 | } | |
9275 | ||
9276 | /* If we can't do that, try to simplify the shift in each arm of the | |
9277 | logical expression, make a new logical expression, and apply | |
9278 | the inverse distributive law. */ | |
9279 | { | |
00d4ca1c | 9280 | rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode, |
230d793d | 9281 | XEXP (varop, 0), count); |
00d4ca1c | 9282 | rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode, |
230d793d RS |
9283 | XEXP (varop, 1), count); |
9284 | ||
21a64bf1 | 9285 | varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs); |
230d793d RS |
9286 | varop = apply_distributive_law (varop); |
9287 | ||
9288 | count = 0; | |
9289 | } | |
9290 | break; | |
9291 | ||
9292 | case EQ: | |
45620ed4 | 9293 | /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE | 
230d793d | 9294 | says that the sign bit can be tested, FOO has mode MODE, C is |
45620ed4 RK |
9295 | GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit |
9296 | that may be nonzero. */ | |
9297 | if (code == LSHIFTRT | |
230d793d RS |
9298 | && XEXP (varop, 1) == const0_rtx |
9299 | && GET_MODE (XEXP (varop, 0)) == result_mode | |
9300 | && count == GET_MODE_BITSIZE (result_mode) - 1 | |
5f4f0e22 | 9301 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
230d793d | 9302 | && ((STORE_FLAG_VALUE |
663522cb | 9303 | & ((HOST_WIDE_INT) 1 |
770ae6cc | 9304 | << (GET_MODE_BITSIZE (result_mode) - 1)))) | 
951553af | 9305 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1 |
5f4f0e22 CH |
9306 | && merge_outer_ops (&outer_op, &outer_const, XOR, |
9307 | (HOST_WIDE_INT) 1, result_mode, | |
9308 | &complement_p)) | |
230d793d RS |
9309 | { |
9310 | varop = XEXP (varop, 0); | |
9311 | count = 0; | |
9312 | continue; | |
9313 | } | |
9314 | break; | |
9315 | ||
9316 | case NEG: | |
d0ab8cd3 RK |
9317 | /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less |
9318 | than the number of bits in the mode is equivalent to A. */ | |
9319 | if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1 | |
951553af | 9320 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1) |
230d793d | 9321 | { |
d0ab8cd3 | 9322 | varop = XEXP (varop, 0); |
230d793d RS |
9323 | count = 0; |
9324 | continue; | |
9325 | } | |
9326 | ||
9327 | /* NEG commutes with ASHIFT since it is multiplication. Move the | |
9328 | NEG outside to allow shifts to combine. */ | |
9329 | if (code == ASHIFT | |
5f4f0e22 CH |
9330 | && merge_outer_ops (&outer_op, &outer_const, NEG, |
9331 | (HOST_WIDE_INT) 0, result_mode, | |
9332 | &complement_p)) | |
230d793d RS |
9333 | { |
9334 | varop = XEXP (varop, 0); | |
9335 | continue; | |
9336 | } | |
9337 | break; | |
9338 | ||
9339 | case PLUS: | |
d0ab8cd3 RK |
9340 | /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C |
9341 | is one less than the number of bits in the mode is | |
9342 | equivalent to (xor A 1). */ | |
230d793d RS |
9343 | if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1 |
9344 | && XEXP (varop, 1) == constm1_rtx | |
951553af | 9345 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1 |
5f4f0e22 CH |
9346 | && merge_outer_ops (&outer_op, &outer_const, XOR, |
9347 | (HOST_WIDE_INT) 1, result_mode, | |
9348 | &complement_p)) | |
230d793d RS |
9349 | { |
9350 | count = 0; | |
9351 | varop = XEXP (varop, 0); | |
9352 | continue; | |
9353 | } | |
9354 | ||
3f508eca | 9355 | /* If we have (xshiftrt (plus FOO BAR) C), and the only bits |
951553af | 9356 | that might be nonzero in BAR are those being shifted out and those |
3f508eca RK |
9357 | bits are known zero in FOO, we can replace the PLUS with FOO. |
9358 | Similarly in the other operand order. This code occurs when | |
9359 | we are computing the size of a variable-size array. */ | |
9360 | ||
9361 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
5f4f0e22 | 9362 | && count < HOST_BITS_PER_WIDE_INT |
951553af RK |
9363 | && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0 |
9364 | && (nonzero_bits (XEXP (varop, 1), result_mode) | |
9365 | & nonzero_bits (XEXP (varop, 0), result_mode)) == 0) | |
3f508eca RK |
9366 | { |
9367 | varop = XEXP (varop, 0); | |
9368 | continue; | |
9369 | } | |
9370 | else if ((code == ASHIFTRT || code == LSHIFTRT) | |
5f4f0e22 | 9371 | && count < HOST_BITS_PER_WIDE_INT |
ac49a949 | 9372 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
951553af | 9373 | && 0 == (nonzero_bits (XEXP (varop, 0), result_mode) |
3f508eca | 9374 | >> count) |
951553af RK |
9375 | && 0 == (nonzero_bits (XEXP (varop, 0), result_mode) |
9376 | & nonzero_bits (XEXP (varop, 1), | |
3f508eca RK |
9377 | result_mode))) |
9378 | { | |
9379 | varop = XEXP (varop, 1); | |
9380 | continue; | |
9381 | } | |
9382 | ||
230d793d RS |
9383 | /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */ |
9384 | if (code == ASHIFT | |
9385 | && GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9386 | && (new = simplify_binary_operation (ASHIFT, result_mode, | |
9387 | XEXP (varop, 1), | |
5f4f0e22 | 9388 | GEN_INT (count))) != 0 |
770ae6cc | 9389 | && GET_CODE (new) == CONST_INT |
230d793d RS |
9390 | && merge_outer_ops (&outer_op, &outer_const, PLUS, |
9391 | INTVAL (new), result_mode, &complement_p)) | |
9392 | { | |
9393 | varop = XEXP (varop, 0); | |
9394 | continue; | |
9395 | } | |
9396 | break; | |
9397 | ||
9398 | case MINUS: | |
9399 | /* If we have (xshiftrt (minus (ashiftrt X C) X) C) | 
9400 | with C the size of VAROP - 1 and the shift is logical if | |
9401 | STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1, | |
9402 | we have a (gt X 0) operation. If the shift is arithmetic with | |
9403 | STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1, | |
9404 | we have a (neg (gt X 0)) operation. */ | |
9405 | ||
0802d516 RK |
9406 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
9407 | && GET_CODE (XEXP (varop, 0)) == ASHIFTRT | |
230d793d | 9408 | && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1 |
230d793d RS |
9409 | && (code == LSHIFTRT || code == ASHIFTRT) |
9410 | && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT | |
9411 | && INTVAL (XEXP (XEXP (varop, 0), 1)) == count | |
9412 | && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) | |
9413 | { | |
9414 | count = 0; | |
9415 | varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1), | |
9416 | const0_rtx); | |
9417 | ||
9418 | if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT) | |
9419 | varop = gen_rtx_combine (NEG, GET_MODE (varop), varop); | |
9420 | ||
9421 | continue; | |
9422 | } | |
9423 | break; | |
6e0ef100 JC |
9424 | |
9425 | case TRUNCATE: | |
9426 | /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt)) | |
9427 | if the truncate does not affect the value. */ | |
9428 | if (code == LSHIFTRT | |
9429 | && GET_CODE (XEXP (varop, 0)) == LSHIFTRT | |
9430 | && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT | |
9431 | && (INTVAL (XEXP (XEXP (varop, 0), 1)) | |
b577a8ff JL |
9432 | >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0))) |
9433 | - GET_MODE_BITSIZE (GET_MODE (varop))))) | |
6e0ef100 JC |
9434 | { |
9435 | rtx varop_inner = XEXP (varop, 0); | |
9436 | ||
770ae6cc RK |
9437 | varop_inner |
9438 | = gen_rtx_combine (LSHIFTRT, GET_MODE (varop_inner), | |
9439 | XEXP (varop_inner, 0), | |
9440 | GEN_INT (count | |
9441 | + INTVAL (XEXP (varop_inner, 1)))); | |
6e0ef100 JC |
9442 | varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop), |
9443 | varop_inner); | |
9444 | count = 0; | |
9445 | continue; | |
9446 | } | |
9447 | break; | |
663522cb | 9448 | |
e9a25f70 JL |
9449 | default: |
9450 | break; | |
230d793d RS |
9451 | } |
9452 | ||
9453 | break; | |
9454 | } | |
9455 | ||
9456 | /* We need to determine what mode to do the shift in. If the shift is | |
f6789c77 RK |
9457 | a right shift or ROTATE, we must always do it in the mode it was |
9458 | originally done in. Otherwise, we can do it in MODE, the widest mode | |
9459 | encountered. The code we care about is that of the shift that will | |
9460 | actually be done, not the shift that was originally requested. */ | |
9461 | shift_mode | |
9462 | = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE | |
9463 | ? result_mode : mode); | |
230d793d RS |
9464 | |
9465 | /* We have now finished analyzing the shift. The result should be | |
9466 | a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If | |
9467 | OUTER_OP is non-NIL, it is an operation that needs to be applied | |
9468 | to the result of the shift. OUTER_CONST is the relevant constant, | |
9469 | but we must turn off all bits turned off in the shift. | |
9470 | ||
9471 | If we were passed a value for X, see if we can use any pieces of | |
9472 | it. If not, make new rtx. */ | |
9473 | ||
9474 | if (x && GET_RTX_CLASS (GET_CODE (x)) == '2' | |
9475 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
9476 | && INTVAL (XEXP (x, 1)) == count) | |
9477 | const_rtx = XEXP (x, 1); | |
9478 | else | |
5f4f0e22 | 9479 | const_rtx = GEN_INT (count); |
230d793d RS |
9480 | |
9481 | if (x && GET_CODE (XEXP (x, 0)) == SUBREG | |
9482 | && GET_MODE (XEXP (x, 0)) == shift_mode | |
9483 | && SUBREG_REG (XEXP (x, 0)) == varop) | |
9484 | varop = XEXP (x, 0); | |
9485 | else if (GET_MODE (varop) != shift_mode) | |
9486 | varop = gen_lowpart_for_combine (shift_mode, varop); | |
9487 | ||
0f41302f | 9488 | /* If we can't make the SUBREG, try to return what we were given. */ |
230d793d RS |
9489 | if (GET_CODE (varop) == CLOBBER) |
9490 | return x ? x : varop; | |
9491 | ||
9492 | new = simplify_binary_operation (code, shift_mode, varop, const_rtx); | |
9493 | if (new != 0) | |
9494 | x = new; | |
9495 | else | |
9496 | { | |
9497 | if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode) | |
9498 | x = gen_rtx_combine (code, shift_mode, varop, const_rtx); | |
9499 | ||
9500 | SUBST (XEXP (x, 0), varop); | |
9501 | SUBST (XEXP (x, 1), const_rtx); | |
9502 | } | |
9503 | ||
224eeff2 RK |
9504 | /* If we have an outer operation and we just made a shift, it is |
9505 | possible that we could have simplified the shift were it not | |
9506 | for the outer operation. So try to do the simplification | |
9507 | recursively. */ | |
9508 | ||
9509 | if (outer_op != NIL && GET_CODE (x) == code | |
9510 | && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
9511 | x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0), | |
9512 | INTVAL (XEXP (x, 1))); | |
9513 | ||
230d793d RS |
9514 | /* If we were doing a LSHIFTRT in a wider mode than it was originally, |
9515 | turn off all the bits that the shift would have turned off. */ | |
9516 | if (orig_code == LSHIFTRT && result_mode != shift_mode) | |
5f4f0e22 | 9517 | x = simplify_and_const_int (NULL_RTX, shift_mode, x, |
230d793d | 9518 | GET_MODE_MASK (result_mode) >> orig_count); |
663522cb | 9519 | |
230d793d RS |
9520 | /* Do the remainder of the processing in RESULT_MODE. */ |
9521 | x = gen_lowpart_for_combine (result_mode, x); | |
9522 | ||
9523 | /* If COMPLEMENT_P is set, we have to complement X before doing the outer | |
9524 | operation. */ | |
9525 | if (complement_p) | |
0c1c8ea6 | 9526 | x = gen_unary (NOT, result_mode, result_mode, x); |
230d793d RS |
9527 | |
9528 | if (outer_op != NIL) | |
9529 | { | |
5f4f0e22 | 9530 | if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT) |
7e4ce834 | 9531 | outer_const = trunc_int_for_mode (outer_const, result_mode); |
230d793d RS |
9532 | |
9533 | if (outer_op == AND) | |
5f4f0e22 | 9534 | x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const); |
230d793d RS |
9535 | else if (outer_op == SET) |
9536 | /* This means that we have determined that the result is | |
9537 | equivalent to a constant. This should be rare. */ | |
5f4f0e22 | 9538 | x = GEN_INT (outer_const); |
230d793d | 9539 | else if (GET_RTX_CLASS (outer_op) == '1') |
0c1c8ea6 | 9540 | x = gen_unary (outer_op, result_mode, result_mode, x); |
230d793d | 9541 | else |
5f4f0e22 | 9542 | x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const)); |
230d793d RS |
9543 | } |
9544 | ||
9545 | return x; | |
663522cb | 9546 | } |
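/* Illustration (not part of combine.c): the nested-shift case above.
   (lshiftrt (ashift X 3) 5) becomes an LSHIFTRT by 5 - 3 = 2 with an
   AND mask obtained by pushing the known-nonzero bits of the inner
   shift through the outer one, exactly as mask_rtx is computed.
   Standalone sketch on 32-bit values.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
check (uint32_t x)
{
  /* Nonzero bits after (x << 3) are 0xfffffff8; run through >> 5.  */
  const uint32_t mask = (0xffffffffu << 3) >> 5;	/* 0x07ffffff */

  assert (((x << 3) >> 5) == ((x >> 2) & mask));
}

int
main (void)
{
  check (0); check (1); check (0xdeadbeefu); check (0xffffffffu);
  return 0;
}
#endif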
230d793d RS |
9547 | \f |
9548 | /* Like recog, but we receive the address of a pointer to a new pattern. | |
9549 | We try to match the rtx that the pointer points to. | |
9550 | If that fails, we may try to modify or replace the pattern, | |
9551 | storing the replacement into the same pointer object. | |
9552 | ||
9553 | Modifications include deletion or addition of CLOBBERs. | |
9554 | ||
9555 | PNOTES is a pointer to a location where any REG_UNUSED notes added for | |
9556 | the CLOBBERs are placed. | |
9557 | ||
9558 | The value is the final insn code from the pattern ultimately matched, | |
9559 | or -1. */ | |
9560 | ||
9561 | static int | |
8e2f6e35 | 9562 | recog_for_combine (pnewpat, insn, pnotes) |
230d793d RS |
9563 | rtx *pnewpat; |
9564 | rtx insn; | |
9565 | rtx *pnotes; | |
9566 | { | |
9567 | register rtx pat = *pnewpat; | |
9568 | int insn_code_number; | |
9569 | int num_clobbers_to_add = 0; | |
9570 | int i; | |
9571 | rtx notes = 0; | |
9572 | ||
974f4146 RK |
9573 | /* If PAT is a PARALLEL, check to see if it contains the CLOBBER |
9574 | we use to indicate that something didn't match. If we find such a | |
9575 | thing, force rejection. */ | |
d96023cf | 9576 | if (GET_CODE (pat) == PARALLEL) |
974f4146 | 9577 | for (i = XVECLEN (pat, 0) - 1; i >= 0; i--) |
d96023cf RK |
9578 | if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER |
9579 | && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx) | |
974f4146 RK |
9580 | return -1; |
9581 | ||
230d793d RS |
9582 | /* Is the result of combination a valid instruction? */ |
9583 | insn_code_number = recog (pat, insn, &num_clobbers_to_add); | |
9584 | ||
9585 | /* If it isn't, there is the possibility that we previously had an insn | |
9586 | that clobbered some register as a side effect, but the combined | |
9587 | insn doesn't need to do that. So try once more without the clobbers | |
9588 | unless this represents an ASM insn. */ | |
9589 | ||
9590 | if (insn_code_number < 0 && ! check_asm_operands (pat) | |
9591 | && GET_CODE (pat) == PARALLEL) | |
9592 | { | |
9593 | int pos; | |
9594 | ||
9595 | for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++) | |
9596 | if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER) | |
9597 | { | |
9598 | if (i != pos) | |
9599 | SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i)); | |
9600 | pos++; | |
9601 | } | |
9602 | ||
9603 | SUBST_INT (XVECLEN (pat, 0), pos); | |
9604 | ||
9605 | if (pos == 1) | |
9606 | pat = XVECEXP (pat, 0, 0); | |
9607 | ||
9608 | insn_code_number = recog (pat, insn, &num_clobbers_to_add); | |
9609 | } | |
9610 | ||
9611 | /* If we had any clobbers to add, make a new pattern that contains | 
9612 | them. Then check to make sure that all of them are dead. */ | |
9613 | if (num_clobbers_to_add) | |
9614 | { | |
38a448ca RH |
9615 | rtx newpat = gen_rtx_PARALLEL (VOIDmode, |
9616 | gen_rtvec (GET_CODE (pat) == PARALLEL | |
c5c76735 JL |
9617 | ? (XVECLEN (pat, 0) |
9618 | + num_clobbers_to_add) | |
38a448ca | 9619 | : num_clobbers_to_add + 1)); |
230d793d RS |
9620 | |
9621 | if (GET_CODE (pat) == PARALLEL) | |
9622 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
9623 | XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i); | |
9624 | else | |
9625 | XVECEXP (newpat, 0, 0) = pat; | |
9626 | ||
9627 | add_clobbers (newpat, insn_code_number); | |
9628 | ||
9629 | for (i = XVECLEN (newpat, 0) - num_clobbers_to_add; | |
9630 | i < XVECLEN (newpat, 0); i++) | |
9631 | { | |
9632 | if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG | |
9633 | && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn)) | |
9634 | return -1; | |
38a448ca RH |
9635 | notes = gen_rtx_EXPR_LIST (REG_UNUSED, |
9636 | XEXP (XVECEXP (newpat, 0, i), 0), notes); | |
230d793d RS |
9637 | } |
9638 | pat = newpat; | |
9639 | } | |
9640 | ||
9641 | *pnewpat = pat; | |
9642 | *pnotes = notes; | |
9643 | ||
9644 | return insn_code_number; | |
9645 | } | |
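/* Illustration (not part of combine.c): the loop above that drops
   CLOBBERs from a PARALLEL is the usual in-place compaction idiom,
   shown here on a plain array.  The names are hypothetical; vec[pos]
   plays the role of SUBST (XVECEXP ...) and the returned length that
   of SUBST_INT (XVECLEN ...).  */
#if 0
static int
compact (int *vec, int len, int (*keep) (int))
{
  int i, pos;

  for (pos = 0, i = 0; i < len; i++)
    if (keep (vec[i]))
      {
	if (i != pos)
	  vec[pos] = vec[i];
	pos++;
      }
  return pos;
}
#endif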
9646 | \f | |
9647 | /* Like gen_lowpart but for use by combine. In combine it is not possible | |
9648 | to create any new pseudoregs. However, it is safe to create | |
9649 | invalid memory addresses, because combine will try to recognize | |
9650 | them and all they will do is make the combine attempt fail. | |
9651 | ||
9652 | If for some reason this cannot do its job, an rtx | |
9653 | (clobber (const_int 0)) is returned. | |
9654 | An insn containing that will not be recognized. */ | |
9655 | ||
9656 | #undef gen_lowpart | |
9657 | ||
9658 | static rtx | |
9659 | gen_lowpart_for_combine (mode, x) | |
9660 | enum machine_mode mode; | |
9661 | register rtx x; | |
9662 | { | |
9663 | rtx result; | |
9664 | ||
9665 | if (GET_MODE (x) == mode) | |
9666 | return x; | |
9667 | ||
eae957a8 RK |
9668 | /* We can only support MODE being wider than a word if X is a |
9669 | constant integer or has a mode the same size. */ | |
9670 | ||
9671 | if (GET_MODE_SIZE (mode) > UNITS_PER_WORD | |
9672 | && ! ((GET_MODE (x) == VOIDmode | |
9673 | && (GET_CODE (x) == CONST_INT | |
9674 | || GET_CODE (x) == CONST_DOUBLE)) | |
9675 | || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode))) | |
38a448ca | 9676 | return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
230d793d RS |
9677 | |
9678 | /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart | |
9679 | won't know what to do. So we will strip off the SUBREG here and | |
9680 | process normally. */ | |
9681 | if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM) | |
9682 | { | |
9683 | x = SUBREG_REG (x); | |
9684 | if (GET_MODE (x) == mode) | |
9685 | return x; | |
9686 | } | |
9687 | ||
9688 | result = gen_lowpart_common (mode, x); | |
02188693 | 9689 | #ifdef CLASS_CANNOT_CHANGE_MODE |
64bf47a2 RK |
9690 | if (result != 0 |
9691 | && GET_CODE (result) == SUBREG | |
9692 | && GET_CODE (SUBREG_REG (result)) == REG | |
9693 | && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER | |
02188693 RH |
9694 | && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (result), |
9695 | GET_MODE (SUBREG_REG (result)))) | |
9696 | REG_CHANGES_MODE (REGNO (SUBREG_REG (result))) = 1; | |
9697 | #endif | |
64bf47a2 | 9698 | |
230d793d RS |
9699 | if (result) |
9700 | return result; | |
9701 | ||
9702 | if (GET_CODE (x) == MEM) | |
9703 | { | |
9704 | register int offset = 0; | |
9705 | rtx new; | |
9706 | ||
9707 | /* Refuse to work on a volatile memory ref or one with a mode-dependent | |
9708 | address. */ | |
9709 | if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0))) | |
38a448ca | 9710 | return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
230d793d RS |
9711 | |
9712 | /* If we want to refer to something bigger than the original memref, | |
9713 | generate a perverse subreg instead. That will force a reload | |
9714 | of the original memref X. */ | |
9715 | if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)) | |
38a448ca | 9716 | return gen_rtx_SUBREG (mode, x, 0); |
230d793d | 9717 | |
f76b9db2 ILT |
9718 | if (WORDS_BIG_ENDIAN) |
9719 | offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD) | |
9720 | - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD)); | |
c5c76735 | 9721 | |
f76b9db2 ILT |
9722 | if (BYTES_BIG_ENDIAN) |
9723 | { | |
9724 | /* Adjust the address so that the address-after-the-data is | |
9725 | unchanged. */ | |
9726 | offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)) | |
9727 | - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x)))); | |
9728 | } | |
38a448ca | 9729 | new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset)); |
c6df88cb | 9730 | MEM_COPY_ATTRIBUTES (new, x); |
230d793d RS |
9731 | return new; |
9732 | } | |
9733 | ||
9734 | /* If X is a comparison operator, rewrite it in a new mode. This | |
9735 | probably won't match, but may allow further simplifications. */ | |
9736 | else if (GET_RTX_CLASS (GET_CODE (x)) == '<') | |
9737 | return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1)); | |
9738 | ||
9739 | /* If we couldn't simplify X any other way, just enclose it in a | |
9740 | SUBREG. Normally, this SUBREG won't match, but some patterns may | |
a7c99304 | 9741 | include an explicit SUBREG or we may simplify it further in combine. */ |
230d793d | 9742 | else |
dfbe1b2f RK |
9743 | { |
9744 | int word = 0; | |
9745 | ||
9746 | if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) | |
9747 | word = ((GET_MODE_SIZE (GET_MODE (x)) | |
9748 | - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD)) | |
9749 | / UNITS_PER_WORD); | |
38a448ca | 9750 | return gen_rtx_SUBREG (mode, x, word); |
dfbe1b2f | 9751 | } |
230d793d RS |
9752 | } |
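/* Illustration (not part of combine.c): the byte offset computed above
   when taking the low part of a wider MEM.  On little-endian targets
   the low part is at offset 0; the big-endian word and byte
   adjustments keep the address-after-the-data unchanged.  Standalone
   sketch; the parameter names are hypothetical stand-ins for the
   target macros.  */
#if 0
static int
lowpart_offset (int outer_size, int inner_size, int units_per_word,
		int words_big_endian, int bytes_big_endian)
{
  int offset = 0;

  if (words_big_endian)
    offset = ((inner_size > units_per_word ? inner_size : units_per_word)
	      - (outer_size > units_per_word ? outer_size : units_per_word));

  if (bytes_big_endian)
    offset -= ((outer_size < units_per_word ? outer_size : units_per_word)
	       - (inner_size < units_per_word ? inner_size : units_per_word));

  /* E.g. the HImode low part of a big-endian SImode MEM: offset 2.  */
  return offset;
}
#endif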
9753 | \f | |
9754 | /* Make an rtx expression. This is a subset of gen_rtx and only supports | |
9755 | expressions of 1, 2, or 3 operands, each of which are rtx expressions. | |
9756 | ||
9757 | If the identical expression was previously in the insn (in the undobuf), | |
9758 | it will be returned. Only if it is not found will a new expression | |
9759 | be made. */ | |
9760 | ||
9761 | /*VARARGS2*/ | |
9762 | static rtx | |
83d2b3b9 | 9763 | gen_rtx_combine VPARAMS ((enum rtx_code code, enum machine_mode mode, ...)) |
230d793d | 9764 | { |
5148a72b | 9765 | #ifndef ANSI_PROTOTYPES |
230d793d RS |
9766 | enum rtx_code code; |
9767 | enum machine_mode mode; | |
4f90e4a0 RK |
9768 | #endif |
9769 | va_list p; | |
230d793d RS |
9770 | int n_args; |
9771 | rtx args[3]; | |
b729186a | 9772 | int j; |
6f7d635c | 9773 | const char *fmt; |
230d793d | 9774 | rtx rt; |
241cea85 | 9775 | struct undo *undo; |
230d793d | 9776 | |
4f90e4a0 RK |
9777 | VA_START (p, mode); |
9778 | ||
5148a72b | 9779 | #ifndef ANSI_PROTOTYPES |
230d793d RS |
9780 | code = va_arg (p, enum rtx_code); |
9781 | mode = va_arg (p, enum machine_mode); | |
4f90e4a0 RK |
9782 | #endif |
9783 | ||
230d793d RS |
9784 | n_args = GET_RTX_LENGTH (code); |
9785 | fmt = GET_RTX_FORMAT (code); | |
9786 | ||
9787 | if (n_args == 0 || n_args > 3) | |
9788 | abort (); | |
9789 | ||
9790 | /* Get each arg and verify that it is supposed to be an expression. */ | |
9791 | for (j = 0; j < n_args; j++) | |
9792 | { | |
9793 | if (*fmt++ != 'e') | |
9794 | abort (); | |
9795 | ||
9796 | args[j] = va_arg (p, rtx); | |
9797 | } | |
9798 | ||
f0305a2b KG |
9799 | va_end (p); |
9800 | ||
230d793d RS |
9801 | /* See if this is in undobuf. Be sure we don't use objects that came |
9802 | from another insn; this could produce circular rtl structures. */ | |
9803 | ||
241cea85 RK |
9804 | for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next) |
9805 | if (!undo->is_int | |
9806 | && GET_CODE (undo->old_contents.r) == code | |
9807 | && GET_MODE (undo->old_contents.r) == mode) | |
230d793d RS |
9808 | { |
9809 | for (j = 0; j < n_args; j++) | |
241cea85 | 9810 | if (XEXP (undo->old_contents.r, j) != args[j]) |
230d793d RS |
9811 | break; |
9812 | ||
9813 | if (j == n_args) | |
241cea85 | 9814 | return undo->old_contents.r; |
230d793d RS |
9815 | } |
9816 | ||
9817 | /* Otherwise make a new rtx. We know we have 1, 2, or 3 args. | |
9818 | Use rtx_alloc instead of gen_rtx because it's faster on RISC. */ | |
9819 | rt = rtx_alloc (code); | |
9820 | PUT_MODE (rt, mode); | |
9821 | XEXP (rt, 0) = args[0]; | |
9822 | if (n_args > 1) | |
9823 | { | |
9824 | XEXP (rt, 1) = args[1]; | |
9825 | if (n_args > 2) | |
9826 | XEXP (rt, 2) = args[2]; | |
9827 | } | |
9828 | return rt; | |
9829 | } | |
9830 | ||
9831 | /* These routines make binary and unary operations by first seeing if they | |
9832 | fold; if not, a new expression is allocated. */ | |
9833 | ||
9834 | static rtx | |
9835 | gen_binary (code, mode, op0, op1) | |
9836 | enum rtx_code code; | |
9837 | enum machine_mode mode; | |
9838 | rtx op0, op1; | |
9839 | { | |
9840 | rtx result; | |
1a26b032 RK |
9841 | rtx tem; |
9842 | ||
9843 | if (GET_RTX_CLASS (code) == 'c' | |
9844 | && (GET_CODE (op0) == CONST_INT | |
9845 | || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT))) | |
9846 | tem = op0, op0 = op1, op1 = tem; | |
230d793d | 9847 | |
663522cb | 9848 | if (GET_RTX_CLASS (code) == '<') |
230d793d RS |
9849 | { |
9850 | enum machine_mode op_mode = GET_MODE (op0); | |
9210df58 | 9851 | |
663522cb | 9852 | /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get |
0f41302f | 9853 | just (REL_OP X Y). */ |
9210df58 RK |
9854 | if (GET_CODE (op0) == COMPARE && op1 == const0_rtx) |
9855 | { | |
9856 | op1 = XEXP (op0, 1); | |
9857 | op0 = XEXP (op0, 0); | |
9858 | op_mode = GET_MODE (op0); | |
9859 | } | |
9860 | ||
230d793d RS |
9861 | if (op_mode == VOIDmode) |
9862 | op_mode = GET_MODE (op1); | |
9863 | result = simplify_relational_operation (code, op_mode, op0, op1); | |
9864 | } | |
9865 | else | |
9866 | result = simplify_binary_operation (code, mode, op0, op1); | |
9867 | ||
9868 | if (result) | |
9869 | return result; | |
9870 | ||
9871 | /* Put complex operands first and constants second. */ | |
9872 | if (GET_RTX_CLASS (code) == 'c' | |
9873 | && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT) | |
9874 | || (GET_RTX_CLASS (GET_CODE (op0)) == 'o' | |
9875 | && GET_RTX_CLASS (GET_CODE (op1)) != 'o') | |
9876 | || (GET_CODE (op0) == SUBREG | |
9877 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o' | |
9878 | && GET_RTX_CLASS (GET_CODE (op1)) != 'o'))) | |
9879 | return gen_rtx_combine (code, mode, op1, op0); | |
9880 | ||
e5e809f4 JL |
9881 | /* If we are turning off bits already known off in OP0, we need not do |
9882 | an AND. */ | |
9883 | else if (code == AND && GET_CODE (op1) == CONST_INT | |
9884 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
663522cb | 9885 | && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0) |
e5e809f4 JL |
9886 | return op0; |
9887 | ||
230d793d RS |
9888 | return gen_rtx_combine (code, mode, op0, op1); |
9889 | } | |
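/* Illustration (not part of combine.c): the final special case above.
   If every bit the AND would clear is already known to be zero in OP0,
   the AND is redundant and OP0 can be returned unchanged.  Standalone
   sketch; the nonzero-bits estimate is supplied by the caller.  */
#if 0
#include <assert.h>

/* Return nonzero if (x & mask) must equal x whenever only the bits in
   NONZERO can be set in x.  */
static int
and_is_redundant (unsigned long nonzero, unsigned long mask)
{
  return (nonzero & ~mask) == 0;
}

int
main (void)
{
  /* A (zero_extend:SI (reg:QI ...)) value has nonzero bits 0xff, so
     an AND with 0xff or any superset of it is a no-op.  */
  assert (and_is_redundant (0xff, 0xff));
  assert (and_is_redundant (0xff, 0x1ff));
  assert (! and_is_redundant (0xff, 0x0f));
  return 0;
}
#endif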
9890 | ||
9891 | static rtx | |
0c1c8ea6 | 9892 | gen_unary (code, mode, op0_mode, op0) |
230d793d | 9893 | enum rtx_code code; |
0c1c8ea6 | 9894 | enum machine_mode mode, op0_mode; |
230d793d RS |
9895 | rtx op0; |
9896 | { | |
0c1c8ea6 | 9897 | rtx result = simplify_unary_operation (code, mode, op0, op0_mode); |
230d793d RS |
9898 | |
9899 | if (result) | |
9900 | return result; | |
9901 | ||
9902 | return gen_rtx_combine (code, mode, op0); | |
9903 | } | |
9904 | \f | |
9905 | /* Simplify a comparison between *POP0 and *POP1 where CODE is the | |
9906 | comparison code that will be tested. | |
9907 | ||
9908 | The result is a possibly different comparison code to use. *POP0 and | |
9909 | *POP1 may be updated. | |
9910 | ||
9911 | It is possible that we might detect that a comparison is either always | |
9912 | true or always false. However, we do not perform general constant | |
5089e22e | 9913 | folding in combine, so this knowledge isn't useful. Such tautologies |
230d793d RS |
9914 | should have been detected earlier. Hence we ignore all such cases. */ |
9915 | ||
9916 | static enum rtx_code | |
9917 | simplify_comparison (code, pop0, pop1) | |
9918 | enum rtx_code code; | |
9919 | rtx *pop0; | |
9920 | rtx *pop1; | |
9921 | { | |
9922 | rtx op0 = *pop0; | |
9923 | rtx op1 = *pop1; | |
9924 | rtx tem, tem1; | |
9925 | int i; | |
9926 | enum machine_mode mode, tmode; | |
9927 | ||
9928 | /* Try a few ways of applying the same transformation to both operands. */ | |
9929 | while (1) | |
9930 | { | |
3a19aabc RK |
9931 | #ifndef WORD_REGISTER_OPERATIONS |
9932 | /* The test below this one won't handle SIGN_EXTENDs on these machines, | |
9933 | so check specially. */ | |
9934 | if (code != GTU && code != GEU && code != LTU && code != LEU | |
9935 | && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT | |
9936 | && GET_CODE (XEXP (op0, 0)) == ASHIFT | |
9937 | && GET_CODE (XEXP (op1, 0)) == ASHIFT | |
9938 | && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG | |
9939 | && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG | |
9940 | && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))) | |
ad25ba17 | 9941 | == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0)))) |
3a19aabc RK |
9942 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
9943 | && GET_CODE (XEXP (op1, 1)) == CONST_INT | |
9944 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
9945 | && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT | |
9946 | && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1)) | |
9947 | && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1)) | |
9948 | && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1)) | |
9949 | && (INTVAL (XEXP (op0, 1)) | |
9950 | == (GET_MODE_BITSIZE (GET_MODE (op0)) | |
9951 | - (GET_MODE_BITSIZE | |
9952 | (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))))))) | |
9953 | { | |
9954 | op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0)); | |
9955 | op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0)); | |
9956 | } | |
9957 | #endif | |
9958 | ||
230d793d RS |
9959 | /* If both operands are the same constant shift, see if we can ignore the |
9960 | shift. We can if the shift is a rotate or if the bits shifted out of | |
951553af | 9961 | this shift are known to be zero for both inputs and if the type of |
230d793d | 9962 | comparison is compatible with the shift. */ |
67232b23 RK |
9963 | if (GET_CODE (op0) == GET_CODE (op1) |
9964 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT | |
9965 | && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ)) | |
45620ed4 | 9966 | || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT) |
67232b23 RK |
9967 | && (code != GT && code != LT && code != GE && code != LE)) |
9968 | || (GET_CODE (op0) == ASHIFTRT | |
9969 | && (code != GTU && code != LTU | |
9970 | && code != GEU && code != LEU))) | 
9971 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
9972 | && INTVAL (XEXP (op0, 1)) >= 0 | |
9973 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT | |
9974 | && XEXP (op0, 1) == XEXP (op1, 1)) | |
230d793d RS |
9975 | { |
9976 | enum machine_mode mode = GET_MODE (op0); | |
5f4f0e22 | 9977 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
230d793d RS |
9978 | int shift_count = INTVAL (XEXP (op0, 1)); |
9979 | ||
9980 | if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT) | |
9981 | mask &= (mask >> shift_count) << shift_count; | |
45620ed4 | 9982 | else if (GET_CODE (op0) == ASHIFT) |
230d793d RS |
9983 | mask = (mask & (mask << shift_count)) >> shift_count; |
9984 | ||
663522cb KH |
9985 | if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0 |
9986 | && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0) | |
230d793d RS |
9987 | op0 = XEXP (op0, 0), op1 = XEXP (op1, 0); |
9988 | else | |
9989 | break; | |
9990 | } | |
9991 | ||
9992 | /* If both operands are AND's of a paradoxical SUBREG by constant, the | |
9993 | SUBREGs are of the same mode, and, in both cases, the AND would | |
9994 | be redundant if the comparison was done in the narrower mode, | |
9995 | do the comparison in the narrower mode (e.g., we are AND'ing with 1 | |
951553af RK |
9996 | and the operand's possibly nonzero bits are 0xffffff01; in that case |
9997 | if we only care about QImode, we don't need the AND). This case | |
9998 | occurs if the output mode of an scc insn is not SImode and | |
7e4dc511 RK |
9999 | STORE_FLAG_VALUE == 1 (e.g., the 386). |
10000 | ||
10001 | Similarly, check for a case where the AND's are ZERO_EXTEND | |
10002 | operations from some narrower mode even though a SUBREG is not | |
10003 | present. */ | |
230d793d | 10004 | |
663522cb KH |
10005 | else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND |
10006 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10007 | && GET_CODE (XEXP (op1, 1)) == CONST_INT) | |
230d793d | 10008 | { |
7e4dc511 RK |
10009 | rtx inner_op0 = XEXP (op0, 0); |
10010 | rtx inner_op1 = XEXP (op1, 0); | |
10011 | HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1)); | |
10012 | HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1)); | |
10013 | int changed = 0; | |
663522cb | 10014 | |
7e4dc511 RK |
10015 | if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG |
10016 | && (GET_MODE_SIZE (GET_MODE (inner_op0)) | |
10017 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0)))) | |
10018 | && (GET_MODE (SUBREG_REG (inner_op0)) | |
10019 | == GET_MODE (SUBREG_REG (inner_op1))) | |
729a2bc6 | 10020 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0))) |
7e4dc511 | 10021 | <= HOST_BITS_PER_WIDE_INT) |
01c82bbb | 10022 | && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0), |
729a2bc6 | 10023 | GET_MODE (SUBREG_REG (inner_op0))))) |
01c82bbb RK |
10024 | && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1), |
10025 | GET_MODE (SUBREG_REG (inner_op1)))))) | |
7e4dc511 RK |
10026 | { |
10027 | op0 = SUBREG_REG (inner_op0); | |
10028 | op1 = SUBREG_REG (inner_op1); | |
10029 | ||
10030 | /* The resulting comparison is always unsigned since we masked | |
0f41302f | 10031 | off the original sign bit. */ |
7e4dc511 RK |
10032 | code = unsigned_condition (code); |
10033 | ||
10034 | changed = 1; | |
10035 | } | |
230d793d | 10036 | |
7e4dc511 RK |
10037 | else if (c0 == c1) |
10038 | for (tmode = GET_CLASS_NARROWEST_MODE | |
10039 | (GET_MODE_CLASS (GET_MODE (op0))); | |
10040 | tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode)) | |
e51712db | 10041 | if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode)) |
7e4dc511 RK |
10042 | { |
10043 | op0 = gen_lowpart_for_combine (tmode, inner_op0); | |
10044 | op1 = gen_lowpart_for_combine (tmode, inner_op1); | |
66415c8b | 10045 | code = unsigned_condition (code); |
7e4dc511 RK |
10046 | changed = 1; |
10047 | break; | |
10048 | } | |
10049 | ||
10050 | if (! changed) | |
10051 | break; | |
230d793d | 10052 | } |
3a19aabc | 10053 | |
ad25ba17 RK |
10054 | /* If both operands are NOT, we can strip off the outer operation |
10055 | and adjust the comparison code for swapped operands; similarly for | |
10056 | NEG, except that this must be an equality comparison. */ | |
10057 | else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT) | |
10058 | || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG | |
10059 | && (code == EQ || code == NE))) | |
10060 | op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code); | |
3a19aabc | 10061 | |
230d793d RS |
10062 | else |
10063 | break; | |
10064 | } | |
663522cb | 10065 | |
230d793d | 10066 | /* If the first operand is a constant, swap the operands and adjust the |
3aceff0d RK |
10067 | comparison code appropriately, but don't do this if the second operand |
10068 | is already a constant integer. */ | |
10069 | if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT) | |
230d793d RS |
10070 | { |
10071 | tem = op0, op0 = op1, op1 = tem; | |
10072 | code = swap_condition (code); | |
10073 | } | |
10074 | ||
10075 | /* We now enter a loop during which we will try to simplify the comparison. | |
10076 | For the most part, we only are concerned with comparisons with zero, | |
10077 | but some things may really be comparisons with zero but not start | |
10078 | out looking that way. */ | |
10079 | ||
10080 | while (GET_CODE (op1) == CONST_INT) | |
10081 | { | |
10082 | enum machine_mode mode = GET_MODE (op0); | |
770ae6cc | 10083 | unsigned int mode_width = GET_MODE_BITSIZE (mode); |
5f4f0e22 | 10084 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
230d793d RS |
10085 | int equality_comparison_p; |
10086 | int sign_bit_comparison_p; | |
10087 | int unsigned_comparison_p; | |
5f4f0e22 | 10088 | HOST_WIDE_INT const_op; |
230d793d RS |
10089 | |
10090 | /* We only want to handle integral modes. This catches VOIDmode, | |
10091 | CCmode, and the floating-point modes. An exception is that we | |
10092 | can handle VOIDmode if OP0 is a COMPARE or a comparison | |
10093 | operation. */ | |
10094 | ||
10095 | if (GET_MODE_CLASS (mode) != MODE_INT | |
10096 | && ! (mode == VOIDmode | |
10097 | && (GET_CODE (op0) == COMPARE | |
10098 | || GET_RTX_CLASS (GET_CODE (op0)) == '<'))) | |
10099 | break; | |
10100 | ||
10101 | /* Get the constant we are comparing against and turn off all bits | |
10102 | not on in our mode. */ | |
3c094e22 | 10103 | const_op = trunc_int_for_mode (INTVAL (op1), mode); |
230d793d RS |
10104 | |
10105 | /* If we are comparing against a constant power of two and the value | |
951553af | 10106 | being compared can only have that single bit nonzero (e.g., it was |
230d793d RS |
10107 | `and'ed with that bit), we can replace this with a comparison |
10108 | with zero. */ | |
10109 | if (const_op | |
10110 | && (code == EQ || code == NE || code == GE || code == GEU | |
10111 | || code == LT || code == LTU) | |
5f4f0e22 | 10112 | && mode_width <= HOST_BITS_PER_WIDE_INT |
230d793d | 10113 | && exact_log2 (const_op) >= 0 |
e51712db | 10114 | && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op) |
230d793d RS |
10115 | { |
10116 | code = (code == EQ || code == GE || code == GEU ? NE : EQ); | |
10117 | op1 = const0_rtx, const_op = 0; | |
10118 | } | |
10119 | ||
d0ab8cd3 RK |
10120 | /* Similarly, if we are comparing a value known to be either -1 or |
10121 | 0 with -1, change it to the opposite comparison against zero. */ | |
10122 | ||
10123 | if (const_op == -1 | |
10124 | && (code == EQ || code == NE || code == GT || code == LE | |
10125 | || code == GEU || code == LTU) | |
10126 | && num_sign_bit_copies (op0, mode) == mode_width) | |
10127 | { | |
10128 | code = (code == EQ || code == LE || code == GEU ? NE : EQ); | |
10129 | op1 = const0_rtx, const_op = 0; | |
10130 | } | |
10131 | ||
230d793d | 10132 | /* Do some canonicalizations based on the comparison code. We prefer |
663522cb | 10133 | comparisons against zero and then prefer equality comparisons. |
4803a34a | 10134 | If we can reduce the size of a constant, we will do that too. */ |
230d793d RS |
10135 | |
10136 | switch (code) | |
10137 | { | |
10138 | case LT: | |
4803a34a RK |
10139 | /* < C is equivalent to <= (C - 1) */ |
10140 | if (const_op > 0) | |
230d793d | 10141 | { |
4803a34a | 10142 | const_op -= 1; |
5f4f0e22 | 10143 | op1 = GEN_INT (const_op); |
230d793d RS |
10144 | code = LE; |
10145 | /* ... fall through to LE case below. */ | |
10146 | } | |
10147 | else | |
10148 | break; | |
10149 | ||
10150 | case LE: | |
4803a34a RK |
10151 | /* <= C is equivalent to < (C + 1); we do this for C < 0 */ |
10152 | if (const_op < 0) | |
10153 | { | |
10154 | const_op += 1; | |
5f4f0e22 | 10155 | op1 = GEN_INT (const_op); |
4803a34a RK |
10156 | code = LT; |
10157 | } | |
230d793d RS |
10158 | |
10159 | /* If we are doing a <= 0 comparison on a value known to have | |
10160 | a zero sign bit, we can replace this with == 0. */ | |
10161 | else if (const_op == 0 | |
5f4f0e22 | 10162 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 10163 | && (nonzero_bits (op0, mode) |
5f4f0e22 | 10164 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0) |
230d793d RS |
10165 | code = EQ; |
10166 | break; | |
10167 | ||
10168 | case GE: | |
0f41302f | 10169 | /* >= C is equivalent to > (C - 1). */ |
4803a34a | 10170 | if (const_op > 0) |
230d793d | 10171 | { |
4803a34a | 10172 | const_op -= 1; |
5f4f0e22 | 10173 | op1 = GEN_INT (const_op); |
230d793d RS |
10174 | code = GT; |
10175 | /* ... fall through to GT below. */ | |
10176 | } | |
10177 | else | |
10178 | break; | |
10179 | ||
10180 | case GT: | |
663522cb | 10181 | /* > C is equivalent to >= (C + 1); we do this for C < 0. */ |
4803a34a RK |
10182 | if (const_op < 0) |
10183 | { | |
10184 | const_op += 1; | |
5f4f0e22 | 10185 | op1 = GEN_INT (const_op); |
4803a34a RK |
10186 | code = GE; |
10187 | } | |
230d793d RS |
10188 | |
10189 | /* If we are doing a > 0 comparison on a value known to have | |
10190 | a zero sign bit, we can replace this with != 0. */ | |
10191 | else if (const_op == 0 | |
5f4f0e22 | 10192 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 10193 | && (nonzero_bits (op0, mode) |
5f4f0e22 | 10194 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0) |
230d793d RS |
10195 | code = NE; |
10196 | break; | |
10197 | ||
230d793d | 10198 | case LTU: |
4803a34a RK |
10199 | /* < C is equivalent to <= (C - 1). */ |
10200 | if (const_op > 0) | |
10201 | { | |
10202 | const_op -= 1; | |
5f4f0e22 | 10203 | op1 = GEN_INT (const_op); |
4803a34a | 10204 | code = LEU; |
0f41302f | 10205 | /* ... fall through ... */ |
4803a34a | 10206 | } |
d0ab8cd3 RK |
10207 | |
10208 | /* (unsigned) < 0x80000000 is equivalent to >= 0. */ | |
f77aada2 JW |
10209 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
10210 | && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))) | |
d0ab8cd3 RK |
10211 | { |
10212 | const_op = 0, op1 = const0_rtx; | |
10213 | code = GE; | |
10214 | break; | |
10215 | } | |
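/* E.g., in a 32-bit mode, X is below 0x80000000 exactly when its sign
   bit is clear, so (ltu X (const_int 0x80000000)) becomes
   (ge X (const_int 0)).  */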
4803a34a RK |
10216 | else |
10217 | break; | |
230d793d RS |
10218 | |
10219 | case LEU: | |
10220 | /* unsigned <= 0 is equivalent to == 0. */ | |
10221 | if (const_op == 0) | |
10222 | code = EQ; | |
d0ab8cd3 | 10223 | |
0f41302f | 10224 | /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */ |
f77aada2 JW |
10225 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
10226 | && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)) | |
d0ab8cd3 RK |
10227 | { |
10228 | const_op = 0, op1 = const0_rtx; | |
10229 | code = GE; | |
10230 | } | |
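/* E.g., in a 32-bit mode, (leu X (const_int 0x7fffffff)) likewise
   becomes (ge X (const_int 0)).  */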
230d793d RS |
10231 | break; |
10232 | ||
4803a34a RK |
10233 | case GEU: |
10234 | /* >= C is equivalent to > (C - 1). */ | |
10235 | if (const_op > 1) | |
10236 | { | |
10237 | const_op -= 1; | |
5f4f0e22 | 10238 | op1 = GEN_INT (const_op); |
4803a34a | 10239 | code = GTU; |
0f41302f | 10240 | /* ... fall through ... */ |
4803a34a | 10241 | } |
d0ab8cd3 RK |
10242 | |
10243 | /* (unsigned) >= 0x80000000 is equivalent to < 0. */ | |
f77aada2 JW |
10244 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
10245 | && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))) | |
d0ab8cd3 RK |
10246 | { |
10247 | const_op = 0, op1 = const0_rtx; | |
10248 | code = LT; | |
8b2e69e1 | 10249 | break; |
d0ab8cd3 | 10250 | } |
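/* E.g., in a 32-bit mode, (geu X (const_int 0x80000000)) becomes
   (lt X (const_int 0)): X is at least 2**31 exactly when its sign
   bit is set.  */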
4803a34a RK |
10251 | else |
10252 | break; | |
10253 | ||
230d793d RS |
10254 | case GTU: |
10255 | /* unsigned > 0 is equivalent to != 0. */ | |
10256 | if (const_op == 0) | |
10257 | code = NE; | |
d0ab8cd3 RK |
10258 | |
10259 | /* (unsigned) > 0x7fffffff is equivalent to < 0. */ | |
f77aada2 JW |
10260 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
10261 | && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)) | |
d0ab8cd3 RK |
10262 | { |
10263 | const_op = 0, op1 = const0_rtx; | |
10264 | code = LT; | |
10265 | } | |
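/* And, e.g., (gtu X (const_int 0x7fffffff)) likewise becomes
   (lt X (const_int 0)) in a 32-bit mode.  */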
230d793d | 10266 | break; |
e9a25f70 JL |
10267 | |
10268 | default: | |
10269 | break; | |
230d793d RS |
10270 | } |
10271 | ||
10272 | /* Compute some predicates to simplify code below. */ | |
10273 | ||
10274 | equality_comparison_p = (code == EQ || code == NE); | |
10275 | sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0); | |
10276 | unsigned_comparison_p = (code == LTU || code == LEU || code == GTU | |
d5010e66 | 10277 | || code == GEU); |
230d793d | 10278 | |
6139ff20 RK |
10279 | /* If this is a sign bit comparison and we can do arithmetic in |
10280 | MODE, say that we will only be needing the sign bit of OP0. */ | |
10281 | if (sign_bit_comparison_p | |
10282 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
10283 | op0 = force_to_mode (op0, mode, | |
10284 | ((HOST_WIDE_INT) 1 | |
10285 | << (GET_MODE_BITSIZE (mode) - 1)), | |
e3d616e3 | 10286 | NULL_RTX, 0); |
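/* In SImode, for instance, the mask passed here is 0x80000000, so
   force_to_mode is free to simplify away anything in OP0 that only
   affects the low-order 31 bits.  */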
6139ff20 | 10287 | |
230d793d RS |
10288 | /* Now try cases based on the opcode of OP0. If none of the cases |
10289 | does a "continue", we exit this loop immediately after the | |
10290 | switch. */ | |
10291 | ||
10292 | switch (GET_CODE (op0)) | |
10293 | { | |
10294 | case ZERO_EXTRACT: | |
10295 | /* If we are extracting a single bit from a variable position in | |
10296 | a constant that has only a single bit set and are comparing it | |
663522cb | 10297 | with zero, we can convert this into an equality comparison |
d7cd794f | 10298 | between the position and the location of the single bit. */ |
230d793d | 10299 | |
230d793d RS |
10300 | if (GET_CODE (XEXP (op0, 0)) == CONST_INT |
10301 | && XEXP (op0, 1) == const1_rtx | |
10302 | && equality_comparison_p && const_op == 0 | |
d7cd794f | 10303 | && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0) |
230d793d | 10304 | { |
f76b9db2 | 10305 | if (BITS_BIG_ENDIAN) |
0d8e55d8 | 10306 | { |
d7cd794f | 10307 | #ifdef HAVE_extzv |
a995e389 | 10308 | mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode; |
0d8e55d8 JL |
10309 | if (mode == VOIDmode) |
10310 | mode = word_mode; | |
10311 | i = (GET_MODE_BITSIZE (mode) - 1 - i); | |
d7cd794f | 10312 | #else |
663522cb | 10313 | i = BITS_PER_WORD - 1 - i; |
230d793d | 10314 | #endif |
0d8e55d8 | 10315 | } |
230d793d RS |
10316 | |
10317 | op0 = XEXP (op0, 2); | |
5f4f0e22 | 10318 | op1 = GEN_INT (i); |
230d793d RS |
10319 | const_op = i; |
10320 | ||
10321 | /* Result is nonzero iff shift count is equal to I. */ | |
10322 | code = reverse_condition (code); | |
10323 | continue; | |
10324 | } | |
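/* E.g., with BITS_BIG_ENDIAN clear, (zero_extract (const_int 4)
   (const_int 1) POS) is nonzero only when POS is 2, so comparing it
   against zero with EQ becomes (ne POS (const_int 2)).  */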
230d793d | 10325 | |
0f41302f | 10326 | /* ... fall through ... */ |
230d793d RS |
10327 | |
10328 | case SIGN_EXTRACT: | |
10329 | tem = expand_compound_operation (op0); | |
10330 | if (tem != op0) | |
10331 | { | |
10332 | op0 = tem; | |
10333 | continue; | |
10334 | } | |
10335 | break; | |
10336 | ||
10337 | case NOT: | |
10338 | /* If testing for equality, we can take the NOT of the constant. */ | |
10339 | if (equality_comparison_p | |
10340 | && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0) | |
10341 | { | |
10342 | op0 = XEXP (op0, 0); | |
10343 | op1 = tem; | |
10344 | continue; | |
10345 | } | |
10346 | ||
10347 | /* If just looking at the sign bit, reverse the sense of the | |
10348 | comparison. */ | |
10349 | if (sign_bit_comparison_p) | |
10350 | { | |
10351 | op0 = XEXP (op0, 0); | |
10352 | code = (code == GE ? LT : GE); | |
10353 | continue; | |
10354 | } | |
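/* The sign bit of (not X) is the complement of the sign bit of X,
   so, e.g., (lt (not X) (const_int 0)) becomes (ge X (const_int 0)).  */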
10355 | break; | |
10356 | ||
10357 | case NEG: | |
10358 | /* If testing for equality, we can take the NEG of the constant. */ | |
10359 | if (equality_comparison_p | |
10360 | && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0) | |
10361 | { | |
10362 | op0 = XEXP (op0, 0); | |
10363 | op1 = tem; | |
10364 | continue; | |
10365 | } | |
10366 | ||
10367 | /* The remaining cases only apply to comparisons with zero. */ | |
10368 | if (const_op != 0) | |
10369 | break; | |
10370 | ||
10371 | /* When X is ABS or is known positive, | |
10372 | (neg X) is < 0 if and only if X != 0. */ | |
10373 | ||
10374 | if (sign_bit_comparison_p | |
10375 | && (GET_CODE (XEXP (op0, 0)) == ABS | |
5f4f0e22 | 10376 | || (mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 10377 | && (nonzero_bits (XEXP (op0, 0), mode) |
5f4f0e22 | 10378 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0))) |
230d793d RS |
10379 | { |
10380 | op0 = XEXP (op0, 0); | |
10381 | code = (code == LT ? NE : EQ); | |
10382 | continue; | |
10383 | } | |
10384 | ||
3bed8141 | 10385 | /* If we have NEG of something whose two high-order bits are the |
0f41302f | 10386 | same, we know that "(-a) < 0" is equivalent to "a > 0". */ |
3bed8141 | 10387 | if (num_sign_bit_copies (op0, mode) >= 2) |
230d793d RS |
10388 | { |
10389 | op0 = XEXP (op0, 0); | |
10390 | code = swap_condition (code); | |
10391 | continue; | |
10392 | } | |
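/* E.g., if X was sign-extended from a narrower mode, (neg X) is known
   to have at least two sign bit copies, the negation cannot have
   overflowed, and (lt (neg X) (const_int 0)) becomes
   (gt X (const_int 0)).  */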
10393 | break; | |
10394 | ||
10395 | case ROTATE: | |
10396 | /* If we are testing equality and our count is a constant, we | |
10397 | can perform the inverse operation on our RHS. */ | |
10398 | if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10399 | && (tem = simplify_binary_operation (ROTATERT, mode, | |
10400 | op1, XEXP (op0, 1))) != 0) | |
10401 | { | |
10402 | op0 = XEXP (op0, 0); | |
10403 | op1 = tem; | |
10404 | continue; | |
10405 | } | |
10406 | ||
10407 | /* If we are doing a < 0 or >= 0 comparison, it means we are testing | |
10408 | a particular bit. Convert it to an AND of a constant of that | |
10409 | bit. This will be converted into a ZERO_EXTRACT. */ | |
10410 | if (const_op == 0 && sign_bit_comparison_p | |
10411 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5f4f0e22 | 10412 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
230d793d | 10413 | { |
5f4f0e22 CH |
10414 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
10415 | ((HOST_WIDE_INT) 1 | |
10416 | << (mode_width - 1 | |
10417 | - INTVAL (XEXP (op0, 1))))); | |
230d793d RS |
10418 | code = (code == LT ? NE : EQ); |
10419 | continue; | |
10420 | } | |
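/* ROTATE is a left rotate, so, e.g., in QImode
   (lt (rotate X (const_int 7)) (const_int 0)) tests bit 0 of X and
   becomes (ne (and X (const_int 1)) (const_int 0)).  */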
10421 | ||
663522cb | 10422 | /* Fall through. */ |
230d793d RS |
10423 | |
10424 | case ABS: | |
10425 | /* ABS is ignorable inside an equality comparison with zero. */ | |
10426 | if (const_op == 0 && equality_comparison_p) | |
10427 | { | |
10428 | op0 = XEXP (op0, 0); | |
10429 | continue; | |
10430 | } | |
10431 | break; | |
230d793d RS |
10432 | |
10433 | case SIGN_EXTEND: | |
10434 | /* Can simplify (compare (zero/sign_extend FOO) CONST) | |
663522cb | 10435 | to (compare FOO CONST) if CONST fits in FOO's mode and we |
230d793d RS |
10436 | are either testing inequality or have an unsigned comparison |
10437 | with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */ | |
10438 | if (! unsigned_comparison_p | |
10439 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | |
5f4f0e22 CH |
10440 | <= HOST_BITS_PER_WIDE_INT) |
10441 | && ((unsigned HOST_WIDE_INT) const_op | |
e51712db | 10442 | < (((unsigned HOST_WIDE_INT) 1 |
5f4f0e22 | 10443 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1))))) |
230d793d RS |
10444 | { |
10445 | op0 = XEXP (op0, 0); | |
10446 | continue; | |
10447 | } | |
10448 | break; | |
10449 | ||
10450 | case SUBREG: | |
a687e897 | 10451 | /* Check for the case where we are comparing A - C1 with C2, |
abc95ed3 | 10452 | both constants are smaller than 1/2 the maximum positive |
a687e897 RK |
10453 | value in MODE, and the comparison is equality or unsigned. |
10454 | In that case, if A is either zero-extended to MODE or has | |
10455 | sufficient sign bits so that the high-order bit in MODE | |
10456 | is a copy of the sign in the inner mode, we can prove that it is | |
10457 | safe to do the operation in the wider mode. This simplifies | |
10458 | many range checks. */ | |
10459 | ||
10460 | if (mode_width <= HOST_BITS_PER_WIDE_INT | |
10461 | && subreg_lowpart_p (op0) | |
10462 | && GET_CODE (SUBREG_REG (op0)) == PLUS | |
10463 | && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT | |
10464 | && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0 | |
663522cb KH |
10465 | && (-INTVAL (XEXP (SUBREG_REG (op0), 1)) |
10466 | < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2)) | |
adb7a1cb | 10467 | && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2 |
951553af RK |
10468 | && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0), |
10469 | GET_MODE (SUBREG_REG (op0))) | |
663522cb | 10470 | & ~GET_MODE_MASK (mode)) |
a687e897 RK |
10471 | || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0), |
10472 | GET_MODE (SUBREG_REG (op0))) | |
10473 | > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) | |
10474 | - GET_MODE_BITSIZE (mode))))) | |
10475 | { | |
10476 | op0 = SUBREG_REG (op0); | |
10477 | continue; | |
10478 | } | |
10479 | ||
fe0cf571 RK |
10480 | /* If the inner mode is narrower and we are extracting the low part, |
10481 | we can treat the SUBREG as if it were a ZERO_EXTEND. */ | |
10482 | if (subreg_lowpart_p (op0) | |
89f1c7f2 RS |
10483 | && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width) |
10484 | /* Fall through */ ; | |
10485 | else | |
230d793d RS |
10486 | break; |
10487 | ||
0f41302f | 10488 | /* ... fall through ... */ |
230d793d RS |
10489 | |
10490 | case ZERO_EXTEND: | |
10491 | if ((unsigned_comparison_p || equality_comparison_p) | |
10492 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | |
5f4f0e22 CH |
10493 | <= HOST_BITS_PER_WIDE_INT) |
10494 | && ((unsigned HOST_WIDE_INT) const_op | |
230d793d RS |
10495 | < GET_MODE_MASK (GET_MODE (XEXP (op0, 0))))) |
10496 | { | |
10497 | op0 = XEXP (op0, 0); | |
10498 | continue; | |
10499 | } | |
10500 | break; | |
10501 | ||
10502 | case PLUS: | |
20fdd649 | 10503 | /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do |
5089e22e | 10504 | this for equality comparisons due to pathological cases involving |
230d793d | 10505 | overflows. */ |
20fdd649 RK |
10506 | if (equality_comparison_p |
10507 | && 0 != (tem = simplify_binary_operation (MINUS, mode, | |
10508 | op1, XEXP (op0, 1)))) | |
230d793d RS |
10509 | { |
10510 | op0 = XEXP (op0, 0); | |
10511 | op1 = tem; | |
10512 | continue; | |
10513 | } | |
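/* E.g., (eq (plus X (const_int 3)) (const_int 7)) becomes
   (eq X (const_int 4)).  */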
10514 | ||
10515 | /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */ | |
10516 | if (const_op == 0 && XEXP (op0, 1) == constm1_rtx | |
10517 | && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p) | |
10518 | { | |
10519 | op0 = XEXP (XEXP (op0, 0), 0); | |
10520 | code = (code == LT ? EQ : NE); | |
10521 | continue; | |
10522 | } | |
10523 | break; | |
10524 | ||
10525 | case MINUS: | |
65945ec1 HPN |
10526 | /* We used to optimize signed comparisons against zero, but that |
10527 | was incorrect. Unsigned comparisons against zero (GTU, LEU) | |
10528 | arrive here as equality comparisons, or (GEU, LTU) are | |
10529 | optimized away. No need to special-case them. */ | |
0bd4b461 | 10530 | |
20fdd649 RK |
10531 | /* (eq (minus A B) C) -> (eq A (plus B C)) or |
10532 | (eq B (minus A C)), whichever simplifies. We can only do | |
10533 | this for equality comparisons due to pathological cases involving | |
10534 | overflows. */ | |
10535 | if (equality_comparison_p | |
10536 | && 0 != (tem = simplify_binary_operation (PLUS, mode, | |
10537 | XEXP (op0, 1), op1))) | |
10538 | { | |
10539 | op0 = XEXP (op0, 0); | |
10540 | op1 = tem; | |
10541 | continue; | |
10542 | } | |
10543 | ||
10544 | if (equality_comparison_p | |
10545 | && 0 != (tem = simplify_binary_operation (MINUS, mode, | |
10546 | XEXP (op0, 0), op1))) | |
10547 | { | |
10548 | op0 = XEXP (op0, 1); | |
10549 | op1 = tem; | |
10550 | continue; | |
10551 | } | |
10552 | ||
230d793d RS |
10553 | /* The sign bit of (minus (ashiftrt X C) X), where C is the number |
10554 | of bits in X minus 1, is one iff X > 0. */ | |
10555 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT | |
10556 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
10557 | && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1 | |
10558 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1))) | |
10559 | { | |
10560 | op0 = XEXP (op0, 1); | |
10561 | code = (code == GE ? LE : GT); | |
10562 | continue; | |
10563 | } | |
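/* E.g., in SImode, C is 31 here, and
   (lt (minus (ashiftrt X 31) X) (const_int 0)) becomes
   (gt X (const_int 0)).  */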
10564 | break; | |
10565 | ||
10566 | case XOR: | |
10567 | /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification | |
10568 | if C is zero or B is a constant. */ | |
10569 | if (equality_comparison_p | |
10570 | && 0 != (tem = simplify_binary_operation (XOR, mode, | |
10571 | XEXP (op0, 1), op1))) | |
10572 | { | |
10573 | op0 = XEXP (op0, 0); | |
10574 | op1 = tem; | |
10575 | continue; | |
10576 | } | |
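/* E.g., (eq (xor X (const_int 5)) (const_int 3)) becomes
   (eq X (const_int 6)), since 5 ^ 3 is 6.  */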
10577 | break; | |
10578 | ||
10579 | case EQ: case NE: | |
10580 | case LT: case LTU: case LE: case LEU: | |
10581 | case GT: case GTU: case GE: case GEU: | |
10582 | /* We can't do anything if OP0 is a condition code value, rather | |
10583 | than an actual data value. */ | |
10584 | if (const_op != 0 | |
10585 | #ifdef HAVE_cc0 | |
10586 | || XEXP (op0, 0) == cc0_rtx | |
10587 | #endif | |
10588 | || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC) | |
10589 | break; | |
10590 | ||
10591 | /* Get the two operands being compared. */ | |
10592 | if (GET_CODE (XEXP (op0, 0)) == COMPARE) | |
10593 | tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1); | |
10594 | else | |
10595 | tem = XEXP (op0, 0), tem1 = XEXP (op0, 1); | |
10596 | ||
10597 | /* Check for the cases where we simply want the result of the | |
10598 | earlier test or the opposite of that result. */ | |
10599 | if (code == NE | |
10600 | || (code == EQ && reversible_comparison_p (op0)) | |
5f4f0e22 | 10601 | || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT |
3f508eca | 10602 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT |
230d793d | 10603 | && (STORE_FLAG_VALUE |
5f4f0e22 CH |
10604 | & (((HOST_WIDE_INT) 1 |
10605 | << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1)))) | |
230d793d RS |
10606 | && (code == LT |
10607 | || (code == GE && reversible_comparison_p (op0))))) | |
10608 | { | |
10609 | code = (code == LT || code == NE | |
10610 | ? GET_CODE (op0) : reverse_condition (GET_CODE (op0))); | |
10611 | op0 = tem, op1 = tem1; | |
10612 | continue; | |
10613 | } | |
10614 | break; | |
10615 | ||
10616 | case IOR: | |
10617 | /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero | |
10618 | iff X <= 0. */ | |
10619 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS | |
10620 | && XEXP (XEXP (op0, 0), 1) == constm1_rtx | |
10621 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1))) | |
10622 | { | |
10623 | op0 = XEXP (op0, 1); | |
10624 | code = (code == GE ? GT : LE); | |
10625 | continue; | |
10626 | } | |
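/* E.g., (lt (ior (plus X (const_int -1)) X) (const_int 0)) becomes
   (le X (const_int 0)): the sign bit of (X - 1) | X is set unless
   X > 0.  */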
10627 | break; | |
10628 | ||
10629 | case AND: | |
10630 | /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This | |
10631 | will be converted to a ZERO_EXTRACT later. */ | |
10632 | if (const_op == 0 && equality_comparison_p | |
45620ed4 | 10633 | && GET_CODE (XEXP (op0, 0)) == ASHIFT |
230d793d RS |
10634 | && XEXP (XEXP (op0, 0), 0) == const1_rtx) |
10635 | { | |
10636 | op0 = simplify_and_const_int | |
10637 | (op0, mode, gen_rtx_combine (LSHIFTRT, mode, | |
10638 | XEXP (op0, 1), | |
10639 | XEXP (XEXP (op0, 0), 1)), | |
5f4f0e22 | 10640 | (HOST_WIDE_INT) 1); |
230d793d RS |
10641 | continue; |
10642 | } | |
10643 | ||
10644 | /* If we are comparing (and (lshiftrt X C1) C2) for equality with | |
10645 | zero and X is a comparison and C1 and C2 describe only bits set | |
10646 | in STORE_FLAG_VALUE, we can compare with X. */ | |
10647 | if (const_op == 0 && equality_comparison_p | |
5f4f0e22 | 10648 | && mode_width <= HOST_BITS_PER_WIDE_INT |
230d793d RS |
10649 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
10650 | && GET_CODE (XEXP (op0, 0)) == LSHIFTRT | |
10651 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
10652 | && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0 | |
5f4f0e22 | 10653 | && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT) |
230d793d RS |
10654 | { |
10655 | mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) | |
10656 | << INTVAL (XEXP (XEXP (op0, 0), 1))); | |
663522cb | 10657 | if ((~STORE_FLAG_VALUE & mask) == 0 |
230d793d RS |
10658 | && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<' |
10659 | || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0 | |
10660 | && GET_RTX_CLASS (GET_CODE (tem)) == '<'))) | |
10661 | { | |
10662 | op0 = XEXP (XEXP (op0, 0), 0); | |
10663 | continue; | |
10664 | } | |
10665 | } | |
10666 | ||
10667 | /* If we are doing an equality comparison of an AND of a bit equal | |
10668 | to the sign bit, replace this with a LT or GE comparison of | |
10669 | the underlying value. */ | |
10670 | if (equality_comparison_p | |
10671 | && const_op == 0 | |
10672 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5f4f0e22 | 10673 | && mode_width <= HOST_BITS_PER_WIDE_INT |
230d793d | 10674 | && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) |
e51712db | 10675 | == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) |
230d793d RS |
10676 | { |
10677 | op0 = XEXP (op0, 0); | |
10678 | code = (code == EQ ? GE : LT); | |
10679 | continue; | |
10680 | } | |
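/* E.g., in SImode, (eq (and X (const_int 0x80000000)) (const_int 0))
   becomes (ge X (const_int 0)).  */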
10681 | ||
10682 | /* If this AND operation is really a ZERO_EXTEND from a narrower | |
10683 | mode, the constant fits within that mode, and this is either an | |
10684 | equality or unsigned comparison, try to do this comparison in | |
10685 | the narrower mode. */ | |
10686 | if ((equality_comparison_p || unsigned_comparison_p) | |
10687 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10688 | && (i = exact_log2 ((INTVAL (XEXP (op0, 1)) | |
10689 | & GET_MODE_MASK (mode)) | |
10690 | + 1)) >= 0 | |
10691 | && const_op >> i == 0 | |
10692 | && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode) | |
10693 | { | |
10694 | op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0)); | |
10695 | continue; | |
10696 | } | |
e5e809f4 JL |
10697 | |
10698 | /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits | |
10699 | in both M1 and M2 and the SUBREG is either paradoxical or | |
10700 | represents the low part, permute the SUBREG and the AND and | |
10701 | try again. */ | |
10702 | if (GET_CODE (XEXP (op0, 0)) == SUBREG | |
c5c76735 | 10703 | && (0 |
9ec36da5 | 10704 | #ifdef WORD_REGISTER_OPERATIONS |
c5c76735 JL |
10705 | || ((mode_width |
10706 | > (GET_MODE_BITSIZE | |
10707 | (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))) | |
10708 | && mode_width <= BITS_PER_WORD) | |
9ec36da5 | 10709 | #endif |
c5c76735 JL |
10710 | || ((mode_width |
10711 | <= (GET_MODE_BITSIZE | |
10712 | (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))) | |
10713 | && subreg_lowpart_p (XEXP (op0, 0)))) | |
adc05e6c JL |
10714 | #ifndef WORD_REGISTER_OPERATIONS |
10715 | /* It is unsafe to commute the AND into the SUBREG if the SUBREG | |
10716 | is paradoxical and WORD_REGISTER_OPERATIONS is not defined. | |
10717 | As originally written the upper bits have a defined value | |
10718 | due to the AND operation. However, if we commute the AND | |
10719 | inside the SUBREG then they no longer have defined values | |
10720 | and the meaning of the code has been changed. */ | |
10721 | && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0))) | |
10722 | <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))) | |
10723 | #endif | |
e5e809f4 JL |
10724 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
10725 | && mode_width <= HOST_BITS_PER_WIDE_INT | |
10726 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))) | |
10727 | <= HOST_BITS_PER_WIDE_INT) | |
663522cb KH |
10728 | && (INTVAL (XEXP (op0, 1)) & ~mask) == 0 |
10729 | && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0)))) | |
9ec36da5 | 10730 | & INTVAL (XEXP (op0, 1))) |
e51712db KG |
10731 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask |
10732 | && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) | |
9ec36da5 | 10733 | != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))) |
663522cb | 10734 | |
e5e809f4 JL |
10735 | { |
10736 | op0 | |
10737 | = gen_lowpart_for_combine | |
10738 | (mode, | |
10739 | gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))), | |
10740 | SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1))); | |
10741 | continue; | |
10742 | } | |
10743 | ||
230d793d RS |
10744 | break; |
10745 | ||
10746 | case ASHIFT: | |
45620ed4 | 10747 | /* If we have (compare (ashift FOO N) (const_int C)) and |
230d793d | 10748 | the high order N bits of FOO (N+1 if an inequality comparison) |
951553af | 10749 | are known to be zero, we can do this by comparing FOO with C |
230d793d RS |
10750 | shifted right N bits so long as the low-order N bits of C are |
10751 | zero. */ | |
10752 | if (GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10753 | && INTVAL (XEXP (op0, 1)) >= 0 | |
10754 | && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p) | |
5f4f0e22 CH |
10755 | < HOST_BITS_PER_WIDE_INT) |
10756 | && ((const_op | |
34785d05 | 10757 | & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0) |
5f4f0e22 | 10758 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 10759 | && (nonzero_bits (XEXP (op0, 0), mode) |
663522cb KH |
10760 | & ~(mask >> (INTVAL (XEXP (op0, 1)) |
10761 | + ! equality_comparison_p))) == 0) | |
230d793d | 10762 | { |
7ce787fe NC |
10763 | /* We must perform a logical shift, not an arithmetic one, |
10764 | as we want the top N bits of C to be zero. */ | |
aaaec114 | 10765 | unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode); |
663522cb | 10766 | |
7ce787fe | 10767 | temp >>= INTVAL (XEXP (op0, 1)); |
aaaec114 | 10768 | op1 = GEN_INT (trunc_int_for_mode (temp, mode)); |
230d793d RS |
10769 | op0 = XEXP (op0, 0); |
10770 | continue; | |
10771 | } | |
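/* E.g., if the high bits of X are known to be zero,
   (eq (ashift X (const_int 2)) (const_int 12)) becomes
   (eq X (const_int 3)).  */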
10772 | ||
dfbe1b2f | 10773 | /* If we are doing a sign bit comparison, it means we are testing |
230d793d | 10774 | a particular bit. Convert it to the appropriate AND. */ |
dfbe1b2f | 10775 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT |
5f4f0e22 | 10776 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
230d793d | 10777 | { |
5f4f0e22 CH |
10778 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
10779 | ((HOST_WIDE_INT) 1 | |
10780 | << (mode_width - 1 | |
10781 | - INTVAL (XEXP (op0, 1))))); | |
230d793d RS |
10782 | code = (code == LT ? NE : EQ); |
10783 | continue; | |
10784 | } | |
dfbe1b2f RK |
10785 | |
10786 | /* If this is an equality comparison with zero and we are shifting |
10787 | the low bit to the sign bit, we can convert this to an AND of the | |
10788 | low-order bit. */ | |
10789 | if (const_op == 0 && equality_comparison_p | |
10790 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10791 | && INTVAL (XEXP (op0, 1)) == mode_width - 1) | |
10792 | { | |
5f4f0e22 CH |
10793 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
10794 | (HOST_WIDE_INT) 1); | |
dfbe1b2f RK |
10795 | continue; |
10796 | } | |
230d793d RS |
10797 | break; |
10798 | ||
10799 | case ASHIFTRT: | |
d0ab8cd3 RK |
10800 | /* If this is an equality comparison with zero, we can do this |
10801 | as a logical shift, which might be much simpler. */ | |
10802 | if (equality_comparison_p && const_op == 0 | |
10803 | && GET_CODE (XEXP (op0, 1)) == CONST_INT) | |
10804 | { | |
10805 | op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, | |
10806 | XEXP (op0, 0), | |
10807 | INTVAL (XEXP (op0, 1))); | |
10808 | continue; | |
10809 | } | |
10810 | ||
230d793d RS |
10811 | /* If OP0 is a sign extension and CODE is not an unsigned comparison, |
10812 | do the comparison in a narrower mode. */ | |
10813 | if (! unsigned_comparison_p | |
10814 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10815 | && GET_CODE (XEXP (op0, 0)) == ASHIFT | |
10816 | && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1) | |
10817 | && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), | |
22331794 | 10818 | MODE_INT, 1)) != BLKmode |
5f4f0e22 | 10819 | && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode) |
663522cb | 10820 | || ((unsigned HOST_WIDE_INT) -const_op |
5f4f0e22 | 10821 | <= GET_MODE_MASK (tmode)))) |
230d793d RS |
10822 | { |
10823 | op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0)); | |
10824 | continue; | |
10825 | } | |
10826 | ||
14a774a9 RK |
10827 | /* Likewise if OP0 is a PLUS of a sign extension with a |
10828 | constant, which is usually represented with the PLUS | |
10829 | between the shifts. */ | |
10830 | if (! unsigned_comparison_p | |
10831 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10832 | && GET_CODE (XEXP (op0, 0)) == PLUS | |
10833 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
10834 | && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT | |
10835 | && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1) | |
10836 | && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), | |
10837 | MODE_INT, 1)) != BLKmode | |
10838 | && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode) | |
663522cb | 10839 | || ((unsigned HOST_WIDE_INT) -const_op |
14a774a9 RK |
10840 | <= GET_MODE_MASK (tmode)))) |
10841 | { | |
10842 | rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0); | |
10843 | rtx add_const = XEXP (XEXP (op0, 0), 1); | |
10844 | rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const, | |
10845 | XEXP (op0, 1)); | |
10846 | ||
10847 | op0 = gen_binary (PLUS, tmode, | |
10848 | gen_lowpart_for_combine (tmode, inner), | |
10849 | new_const); | |
10850 | continue; | |
10851 | } | |
10852 | ||
0f41302f | 10853 | /* ... fall through ... */ |
230d793d RS |
10854 | case LSHIFTRT: |
10855 | /* If we have (compare (xshiftrt FOO N) (const_int C)) and | |
951553af | 10856 | the low order N bits of FOO are known to be zero, we can do this |
230d793d RS |
10857 | by comparing FOO with C shifted left N bits so long as no |
10858 | overflow occurs. */ | |
10859 | if (GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10860 | && INTVAL (XEXP (op0, 1)) >= 0 | |
5f4f0e22 CH |
10861 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT |
10862 | && mode_width <= HOST_BITS_PER_WIDE_INT | |
951553af | 10863 | && (nonzero_bits (XEXP (op0, 0), mode) |
5f4f0e22 | 10864 | & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0 |
230d793d RS |
10865 | && (const_op == 0 |
10866 | || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1)) | |
10867 | < mode_width))) | |
10868 | { | |
10869 | const_op <<= INTVAL (XEXP (op0, 1)); | |
5f4f0e22 | 10870 | op1 = GEN_INT (const_op); |
230d793d RS |
10871 | op0 = XEXP (op0, 0); |
10872 | continue; | |
10873 | } | |
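/* E.g., if the low two bits of X are known to be zero,
   (eq (lshiftrt X (const_int 2)) (const_int 3)) becomes
   (eq X (const_int 12)).  */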
10874 | ||
10875 | /* If we are using this shift to extract just the sign bit, we | |
10876 | can replace this with an LT or GE comparison. */ | |
10877 | if (const_op == 0 | |
10878 | && (equality_comparison_p || sign_bit_comparison_p) | |
10879 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10880 | && INTVAL (XEXP (op0, 1)) == mode_width - 1) | |
10881 | { | |
10882 | op0 = XEXP (op0, 0); | |
10883 | code = (code == NE || code == GT ? LT : GE); | |
10884 | continue; | |
10885 | } | |
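/* E.g., in SImode, (ne (lshiftrt X (const_int 31)) (const_int 0))
   isolates the sign bit and becomes (lt X (const_int 0)).  */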
10886 | break; | |
663522cb | 10887 | |
e9a25f70 JL |
10888 | default: |
10889 | break; | |
230d793d RS |
10890 | } |
10891 | ||
10892 | break; | |
10893 | } | |
10894 | ||
10895 | /* Now make any compound operations involved in this comparison. Then, | |
76d31c63 | 10896 | check for an outermost SUBREG on OP0 that is not doing anything or is |
230d793d RS |
10897 | paradoxical. The latter case can only occur when it is known that the |
10898 | "extra" bits will be zero. Therefore, it is safe to remove the SUBREG. | |
10899 | We can never remove a SUBREG for a non-equality comparison because the | |
10900 | sign bit is in a different place in the underlying object. */ | |
10901 | ||
10902 | op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET); | |
10903 | op1 = make_compound_operation (op1, SET); | |
10904 | ||
10905 | if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0) | |
10906 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT | |
10907 | && (code == NE || code == EQ) | |
10908 | && ((GET_MODE_SIZE (GET_MODE (op0)) | |
10909 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))) | |
10910 | { | |
10911 | op0 = SUBREG_REG (op0); | |
10912 | op1 = gen_lowpart_for_combine (GET_MODE (op0), op1); | |
10913 | } | |
10914 | ||
10915 | else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0) | |
10916 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT | |
10917 | && (code == NE || code == EQ) | |
ac49a949 RS |
10918 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) |
10919 | <= HOST_BITS_PER_WIDE_INT) | |
951553af | 10920 | && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0))) |
663522cb | 10921 | & ~GET_MODE_MASK (GET_MODE (op0))) == 0 |
230d793d RS |
10922 | && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), |
10923 | op1), | |
951553af | 10924 | (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0))) |
663522cb | 10925 | & ~GET_MODE_MASK (GET_MODE (op0))) == 0)) |
230d793d RS |
10926 | op0 = SUBREG_REG (op0), op1 = tem; |
10927 | ||
10928 | /* We now do the opposite procedure: Some machines don't have compare | |
10929 | insns in all modes. If OP0's mode is an integer mode smaller than a | |
10930 | word and we can't do a compare in that mode, see if there is a larger | |
a687e897 RK |
10931 | mode for which we can do the compare. There are a number of cases in |
10932 | which we can use the wider mode. */ | |
230d793d RS |
10933 | |
10934 | mode = GET_MODE (op0); | |
10935 | if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT | |
10936 | && GET_MODE_SIZE (mode) < UNITS_PER_WORD | |
10937 | && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) | |
10938 | for (tmode = GET_MODE_WIDER_MODE (mode); | |
5f4f0e22 CH |
10939 | (tmode != VOIDmode |
10940 | && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT); | |
230d793d | 10941 | tmode = GET_MODE_WIDER_MODE (tmode)) |
a687e897 | 10942 | if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing) |
230d793d | 10943 | { |
951553af | 10944 | /* If the only nonzero bits in OP0 and OP1 are those in the |
a687e897 RK |
10945 | narrower mode and this is an equality or unsigned comparison, |
10946 | we can use the wider mode. Similarly for sign-extended | |
7e4dc511 | 10947 | values, in which case it is true for all comparisons. */ |
a687e897 RK |
10948 | if (((code == EQ || code == NE |
10949 | || code == GEU || code == GTU || code == LEU || code == LTU) | |
663522cb KH |
10950 | && (nonzero_bits (op0, tmode) & ~GET_MODE_MASK (mode)) == 0 |
10951 | && (nonzero_bits (op1, tmode) & ~GET_MODE_MASK (mode)) == 0) | |
7e4dc511 RK |
10952 | || ((num_sign_bit_copies (op0, tmode) |
10953 | > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode)) | |
a687e897 | 10954 | && (num_sign_bit_copies (op1, tmode) |
58744483 | 10955 | > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode)))) |
a687e897 | 10956 | { |
14a774a9 RK |
10957 | /* If OP0 is an AND and we don't have an AND in MODE either, |
10958 | make a new AND in the proper mode. */ | |
10959 | if (GET_CODE (op0) == AND | |
10960 | && (add_optab->handlers[(int) mode].insn_code | |
10961 | == CODE_FOR_nothing)) | |
10962 | op0 = gen_binary (AND, tmode, | |
10963 | gen_lowpart_for_combine (tmode, | |
10964 | XEXP (op0, 0)), | |
10965 | gen_lowpart_for_combine (tmode, | |
10966 | XEXP (op0, 1))); | |
10967 | ||
a687e897 RK |
10968 | op0 = gen_lowpart_for_combine (tmode, op0); |
10969 | op1 = gen_lowpart_for_combine (tmode, op1); | |
10970 | break; | |
10971 | } | |
230d793d | 10972 | |
a687e897 RK |
10973 | /* If this is a test for negative, we can make an explicit |
10974 | test of the sign bit. */ | |
10975 | ||
10976 | if (op1 == const0_rtx && (code == LT || code == GE) | |
10977 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
230d793d | 10978 | { |
a687e897 RK |
10979 | op0 = gen_binary (AND, tmode, |
10980 | gen_lowpart_for_combine (tmode, op0), | |
5f4f0e22 CH |
10981 | GEN_INT ((HOST_WIDE_INT) 1 |
10982 | << (GET_MODE_BITSIZE (mode) - 1))); | |
230d793d | 10983 | code = (code == LT) ? NE : EQ; |
a687e897 | 10984 | break; |
230d793d | 10985 | } |
230d793d RS |
10986 | } |
10987 | ||
b7a775b2 RK |
10988 | #ifdef CANONICALIZE_COMPARISON |
10989 | /* If this machine only supports a subset of valid comparisons, see if we | |
10990 | can convert an unsupported one into a supported one. */ | |
10991 | CANONICALIZE_COMPARISON (code, op0, op1); | |
10992 | #endif | |
10993 | ||
230d793d RS |
10994 | *pop0 = op0; |
10995 | *pop1 = op1; | |
10996 | ||
10997 | return code; | |
10998 | } | |
10999 | \f | |
11000 | /* Return 1 if we know that X, a comparison operation, is not operating | |
11001 | on a floating-point value or is EQ or NE, meaning that we can safely | |
11002 | reverse it. */ | |
11003 | ||
11004 | static int | |
11005 | reversible_comparison_p (x) | |
11006 | rtx x; | |
11007 | { | |
11008 | if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT | |
7e2a0d8e | 11009 | || flag_fast_math |
1eb8759b RH |
11010 | || GET_CODE (x) == NE || GET_CODE (x) == EQ |
11011 | || GET_CODE (x) == UNORDERED || GET_CODE (x) == ORDERED) | |
230d793d RS |
11012 | return 1; |
11013 | ||
11014 | switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0)))) | |
11015 | { | |
11016 | case MODE_INT: | |
3ad2180a RK |
11017 | case MODE_PARTIAL_INT: |
11018 | case MODE_COMPLEX_INT: | |
230d793d RS |
11019 | return 1; |
11020 | ||
11021 | case MODE_CC: | |
9210df58 RK |
11022 | /* If the mode of the condition codes tells us that this is safe, |
11023 | we need look no further. */ | |
11024 | if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0)))) | |
11025 | return 1; | |
11026 | ||
11027 | /* Otherwise try and find where the condition codes were last set and | |
11028 | use that. */ | |
230d793d RS |
11029 | x = get_last_value (XEXP (x, 0)); |
11030 | return (x && GET_CODE (x) == COMPARE | |
3ad2180a | 11031 | && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))); |
663522cb | 11032 | |
e9a25f70 JL |
11033 | default: |
11034 | return 0; | |
230d793d | 11035 | } |
230d793d RS |
11036 | } |
11037 | \f | |
11038 | /* Utility function for following routine. Called when X is part of a value | |
11039 | being stored into reg_last_set_value. Sets reg_last_set_table_tick | |
11040 | for each register mentioned. Similar to mention_regs in cse.c */ | |
11041 | ||
11042 | static void | |
11043 | update_table_tick (x) | |
11044 | rtx x; | |
11045 | { | |
11046 | register enum rtx_code code = GET_CODE (x); | |
6f7d635c | 11047 | register const char *fmt = GET_RTX_FORMAT (code); |
230d793d RS |
11048 | register int i; |
11049 | ||
11050 | if (code == REG) | |
11051 | { | |
770ae6cc RK |
11052 | unsigned int regno = REGNO (x); |
11053 | unsigned int endregno | |
11054 | = regno + (regno < FIRST_PSEUDO_REGISTER | |
11055 | ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1); | |
11056 | unsigned int r; | |
230d793d | 11057 | |
770ae6cc RK |
11058 | for (r = regno; r < endregno; r++) |
11059 | reg_last_set_table_tick[r] = label_tick; | |
230d793d RS |
11060 | |
11061 | return; | |
11062 | } | |
663522cb | 11063 | |
230d793d RS |
11064 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
11065 | /* Note that we can't have an "E" in values stored; see | |
11066 | get_last_value_validate. */ | |
11067 | if (fmt[i] == 'e') | |
11068 | update_table_tick (XEXP (x, i)); | |
11069 | } | |
11070 | ||
11071 | /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we | |
11072 | are saying that the register is clobbered and we no longer know its | |
7988fd36 RK |
11073 | value. If INSN is zero, don't update reg_last_set; this is only permitted |
11074 | with VALUE also zero and is used to invalidate the register. */ | |
230d793d RS |
11075 | |
11076 | static void | |
11077 | record_value_for_reg (reg, insn, value) | |
11078 | rtx reg; | |
11079 | rtx insn; | |
11080 | rtx value; | |
11081 | { | |
770ae6cc RK |
11082 | unsigned int regno = REGNO (reg); |
11083 | unsigned int endregno | |
11084 | = regno + (regno < FIRST_PSEUDO_REGISTER | |
11085 | ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1); | |
11086 | unsigned int i; | |
230d793d RS |
11087 | |
11088 | /* If VALUE contains REG and we have a previous value for REG, substitute | |
11089 | the previous value. */ | |
11090 | if (value && insn && reg_overlap_mentioned_p (reg, value)) | |
11091 | { | |
11092 | rtx tem; | |
11093 | ||
11094 | /* Set things up so get_last_value is allowed to see anything set up to | |
11095 | our insn. */ | |
11096 | subst_low_cuid = INSN_CUID (insn); | |
663522cb | 11097 | tem = get_last_value (reg); |
230d793d | 11098 | |
14a774a9 RK |
11099 | /* If TEM is simply a binary operation with two CLOBBERs as operands, |
11100 | it isn't going to be useful and will take a lot of time to process, | |
11101 | so just use the CLOBBER. */ | |
11102 | ||
230d793d | 11103 | if (tem) |
14a774a9 RK |
11104 | { |
11105 | if ((GET_RTX_CLASS (GET_CODE (tem)) == '2' | |
11106 | || GET_RTX_CLASS (GET_CODE (tem)) == 'c') | |
11107 | && GET_CODE (XEXP (tem, 0)) == CLOBBER | |
11108 | && GET_CODE (XEXP (tem, 1)) == CLOBBER) | |
11109 | tem = XEXP (tem, 0); | |
11110 | ||
11111 | value = replace_rtx (copy_rtx (value), reg, tem); | |
11112 | } | |
230d793d RS |
11113 | } |
11114 | ||
11115 | /* For each register modified, show we don't know its value, that | |
ef026f91 RS |
11116 | we don't know about its bitwise content, that its value has been |
11117 | updated, and that we don't know the location of the death of the | |
11118 | register. */ | |
770ae6cc | 11119 | for (i = regno; i < endregno; i++) |
230d793d RS |
11120 | { |
11121 | if (insn) | |
11122 | reg_last_set[i] = insn; | |
770ae6cc | 11123 | |
230d793d | 11124 | reg_last_set_value[i] = 0; |
ef026f91 RS |
11125 | reg_last_set_mode[i] = 0; |
11126 | reg_last_set_nonzero_bits[i] = 0; | |
11127 | reg_last_set_sign_bit_copies[i] = 0; | |
230d793d RS |
11128 | reg_last_death[i] = 0; |
11129 | } | |
11130 | ||
11131 | /* Mark registers that are being referenced in this value. */ | |
11132 | if (value) | |
11133 | update_table_tick (value); | |
11134 | ||
11135 | /* Now update the status of each register being set. | |
11136 | If someone is using this register in this block, set this register | |
11137 | to invalid since we will get confused between the two lives in this | |
11138 | basic block. This makes using this register always invalid. In cse, we | |
11139 | scan the table to invalidate all entries using this register, but this | |
11140 | is too much work for us. */ | |
11141 | ||
11142 | for (i = regno; i < endregno; i++) | |
11143 | { | |
11144 | reg_last_set_label[i] = label_tick; | |
11145 | if (value && reg_last_set_table_tick[i] == label_tick) | |
11146 | reg_last_set_invalid[i] = 1; | |
11147 | else | |
11148 | reg_last_set_invalid[i] = 0; | |
11149 | } | |
11150 | ||
11151 | /* The value being assigned might refer to X (like in "x++;"). In that | |
11152 | case, we must replace it with (clobber (const_int 0)) to prevent | |
11153 | infinite loops. */ | |
9a893315 | 11154 | if (value && ! get_last_value_validate (&value, insn, |
230d793d RS |
11155 | reg_last_set_label[regno], 0)) |
11156 | { | |
11157 | value = copy_rtx (value); | |
9a893315 JW |
11158 | if (! get_last_value_validate (&value, insn, |
11159 | reg_last_set_label[regno], 1)) | |
230d793d RS |
11160 | value = 0; |
11161 | } | |
11162 | ||
55310dad RK |
11163 | /* For the main register being modified, update the value, the mode, the |
11164 | nonzero bits, and the number of sign bit copies. */ | |
11165 | ||
230d793d RS |
11166 | reg_last_set_value[regno] = value; |
11167 | ||
55310dad RK |
11168 | if (value) |
11169 | { | |
2afabb48 | 11170 | subst_low_cuid = INSN_CUID (insn); |
55310dad RK |
11171 | reg_last_set_mode[regno] = GET_MODE (reg); |
11172 | reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg)); | |
11173 | reg_last_set_sign_bit_copies[regno] | |
11174 | = num_sign_bit_copies (value, GET_MODE (reg)); | |
11175 | } | |
230d793d RS |
11176 | } |
11177 | ||
230d793d | 11178 | /* Called via note_stores from record_dead_and_set_regs to handle one |
84832317 MM |
11179 | SET or CLOBBER in an insn. DATA is the instruction in which the |
11180 | set is occurring. */ | |
230d793d RS |
11181 | |
11182 | static void | |
84832317 | 11183 | record_dead_and_set_regs_1 (dest, setter, data) |
230d793d | 11184 | rtx dest, setter; |
84832317 | 11185 | void *data; |
230d793d | 11186 | { |
84832317 MM |
11187 | rtx record_dead_insn = (rtx) data; |
11188 | ||
ca89d290 RK |
11189 | if (GET_CODE (dest) == SUBREG) |
11190 | dest = SUBREG_REG (dest); | |
11191 | ||
230d793d RS |
11192 | if (GET_CODE (dest) == REG) |
11193 | { | |
11194 | /* If we are setting the whole register, we know its value. Otherwise | |
11195 | show that we don't know the value. We can handle SUBREG in | |
11196 | some cases. */ | |
11197 | if (GET_CODE (setter) == SET && dest == SET_DEST (setter)) | |
11198 | record_value_for_reg (dest, record_dead_insn, SET_SRC (setter)); | |
11199 | else if (GET_CODE (setter) == SET | |
11200 | && GET_CODE (SET_DEST (setter)) == SUBREG | |
11201 | && SUBREG_REG (SET_DEST (setter)) == dest | |
90bf8081 | 11202 | && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD |
230d793d | 11203 | && subreg_lowpart_p (SET_DEST (setter))) |
d0ab8cd3 RK |
11204 | record_value_for_reg (dest, record_dead_insn, |
11205 | gen_lowpart_for_combine (GET_MODE (dest), | |
11206 | SET_SRC (setter))); | |
230d793d | 11207 | else |
5f4f0e22 | 11208 | record_value_for_reg (dest, record_dead_insn, NULL_RTX); |
230d793d RS |
11209 | } |
11210 | else if (GET_CODE (dest) == MEM | |
11211 | /* Ignore pushes, they clobber nothing. */ | |
11212 | && ! push_operand (dest, GET_MODE (dest))) | |
11213 | mem_last_set = INSN_CUID (record_dead_insn); | |
11214 | } | |
11215 | ||
11216 | /* Update the records of when each REG was most recently set or killed | |
11217 | for the things done by INSN. This is the last thing done in processing | |
11218 | INSN in the combiner loop. | |
11219 | ||
ef026f91 RS |
11220 | We update reg_last_set, reg_last_set_value, reg_last_set_mode, |
11221 | reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death, | |
11222 | and also the similar information mem_last_set (which insn most recently | |
11223 | modified memory) and last_call_cuid (which insn was the most recent | |
11224 | subroutine call). */ | |
230d793d RS |
11225 | |
11226 | static void | |
11227 | record_dead_and_set_regs (insn) | |
11228 | rtx insn; | |
11229 | { | |
11230 | register rtx link; | |
770ae6cc | 11231 | unsigned int i; |
55310dad | 11232 | |
230d793d RS |
11233 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
11234 | { | |
dbc131f3 RK |
11235 | if (REG_NOTE_KIND (link) == REG_DEAD |
11236 | && GET_CODE (XEXP (link, 0)) == REG) | |
11237 | { | |
770ae6cc RK |
11238 | unsigned int regno = REGNO (XEXP (link, 0)); |
11239 | unsigned int endregno | |
dbc131f3 RK |
11240 | = regno + (regno < FIRST_PSEUDO_REGISTER |
11241 | ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0))) | |
11242 | : 1); | |
dbc131f3 RK |
11243 | |
11244 | for (i = regno; i < endregno; i++) | |
11245 | reg_last_death[i] = insn; | |
11246 | } | |
230d793d | 11247 | else if (REG_NOTE_KIND (link) == REG_INC) |
5f4f0e22 | 11248 | record_value_for_reg (XEXP (link, 0), insn, NULL_RTX); |
230d793d RS |
11249 | } |
11250 | ||
11251 | if (GET_CODE (insn) == CALL_INSN) | |
55310dad RK |
11252 | { |
11253 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
11254 | if (call_used_regs[i]) | |
11255 | { | |
11256 | reg_last_set_value[i] = 0; | |
ef026f91 RS |
11257 | reg_last_set_mode[i] = 0; |
11258 | reg_last_set_nonzero_bits[i] = 0; | |
11259 | reg_last_set_sign_bit_copies[i] = 0; | |
55310dad RK |
11260 | reg_last_death[i] = 0; |
11261 | } | |
11262 | ||
11263 | last_call_cuid = mem_last_set = INSN_CUID (insn); | |
11264 | } | |
230d793d | 11265 | |
84832317 | 11266 | note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn); |
230d793d | 11267 | } |
732f2ac9 | 11268 | |
732f2ac9 JJ |
11269 | /* If a SUBREG has the promoted bit set, it is in fact a property of the |
11270 | register present in the SUBREG, so for each such SUBREG go back and | |
11271 | adjust nonzero and sign bit information of the registers that are | |
11272 | known to have some zero/sign bits set. | |
11273 | ||
11274 | This is needed because when combine blows the SUBREGs away, the | |
11275 | information on zero/sign bits is lost and further combines can be | |
11276 | missed because of that. */ | |
11277 | ||
11278 | static void | |
11279 | record_promoted_value (insn, subreg) | |
663522cb KH |
11280 | rtx insn; |
11281 | rtx subreg; | |
732f2ac9 | 11282 | { |
4a71b24f | 11283 | rtx links, set; |
770ae6cc | 11284 | unsigned int regno = REGNO (SUBREG_REG (subreg)); |
732f2ac9 JJ |
11285 | enum machine_mode mode = GET_MODE (subreg); |
11286 | ||
25af74a0 | 11287 | if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT) |
732f2ac9 JJ |
11288 | return; |
11289 | ||
663522cb | 11290 | for (links = LOG_LINKS (insn); links;) |
732f2ac9 JJ |
11291 | { |
11292 | insn = XEXP (links, 0); | |
11293 | set = single_set (insn); | |
11294 | ||
11295 | if (! set || GET_CODE (SET_DEST (set)) != REG | |
11296 | || REGNO (SET_DEST (set)) != regno | |
11297 | || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg))) | |
11298 | { | |
11299 | links = XEXP (links, 1); | |
11300 | continue; | |
11301 | } | |
11302 | ||
663522cb KH |
11303 | if (reg_last_set[regno] == insn) |
11304 | { | |
732f2ac9 | 11305 | if (SUBREG_PROMOTED_UNSIGNED_P (subreg)) |
663522cb KH |
11306 | reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode); |
11307 | } | |
732f2ac9 JJ |
11308 | |
11309 | if (GET_CODE (SET_SRC (set)) == REG) | |
11310 | { | |
11311 | regno = REGNO (SET_SRC (set)); | |
11312 | links = LOG_LINKS (insn); | |
11313 | } | |
11314 | else | |
11315 | break; | |
11316 | } | |
11317 | } | |
11318 | ||
11319 | /* Scan X for promoted SUBREGs. For each one found, | |
11320 | note what it implies to the registers used in it. */ | |
11321 | ||
11322 | static void | |
11323 | check_promoted_subreg (insn, x) | |
663522cb KH |
11324 | rtx insn; |
11325 | rtx x; | |
732f2ac9 JJ |
11326 | { |
11327 | if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x) | |
11328 | && GET_CODE (SUBREG_REG (x)) == REG) | |
11329 | record_promoted_value (insn, x); | |
11330 | else | |
11331 | { | |
11332 | const char *format = GET_RTX_FORMAT (GET_CODE (x)); | |
11333 | int i, j; | |
11334 | ||
11335 | for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++) | |
663522cb | 11336 | switch (format[i]) |
732f2ac9 JJ |
11337 | { |
11338 | case 'e': | |
11339 | check_promoted_subreg (insn, XEXP (x, i)); | |
11340 | break; | |
11341 | case 'V': | |
11342 | case 'E': | |
11343 | if (XVEC (x, i) != 0) | |
11344 | for (j = 0; j < XVECLEN (x, i); j++) | |
11345 | check_promoted_subreg (insn, XVECEXP (x, i, j)); | |
11346 | break; | |
11347 | } | |
11348 | } | |
11349 | } | |
230d793d RS |
11350 | \f |
11351 | /* Utility routine for the following function. Verify that all the registers | |
11352 | mentioned in *LOC are valid when *LOC was part of a value set when | |
11353 | label_tick == TICK. Return 0 if some are not. | |
11354 | ||
11355 | If REPLACE is non-zero, replace the invalid reference with | |
11356 | (clobber (const_int 0)) and return 1. This replacement is useful because | |
11357 | we often can get useful information about the form of a value (e.g., if | |
11358 | it was produced by a shift that always produces -1 or 0) even though | |
11359 | we don't know exactly what registers it was produced from. */ | |
11360 | ||
11361 | static int | |
9a893315 | 11362 | get_last_value_validate (loc, insn, tick, replace) |
230d793d | 11363 | rtx *loc; |
9a893315 | 11364 | rtx insn; |
230d793d RS |
11365 | int tick; |
11366 | int replace; | |
11367 | { | |
11368 | rtx x = *loc; | |
6f7d635c | 11369 | const char *fmt = GET_RTX_FORMAT (GET_CODE (x)); |
230d793d RS |
11370 | int len = GET_RTX_LENGTH (GET_CODE (x)); |
11371 | int i; | |
11372 | ||
11373 | if (GET_CODE (x) == REG) | |
11374 | { | |
770ae6cc RK |
11375 | unsigned int regno = REGNO (x); |
11376 | unsigned int endregno | |
11377 | = regno + (regno < FIRST_PSEUDO_REGISTER | |
11378 | ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1); | |
11379 | unsigned int j; | |
230d793d RS |
11380 | |
11381 | for (j = regno; j < endregno; j++) | |
11382 | if (reg_last_set_invalid[j] | |
57cf50a4 GRK |
11383 | /* If this is a pseudo-register that was only set once and not |
11384 | live at the beginning of the function, it is always valid. */ | |
663522cb | 11385 | || (! (regno >= FIRST_PSEUDO_REGISTER |
57cf50a4 | 11386 | && REG_N_SETS (regno) == 1 |
770ae6cc RK |
11387 | && (! REGNO_REG_SET_P |
11388 | (BASIC_BLOCK (0)->global_live_at_start, regno))) | |
230d793d RS |
11389 | && reg_last_set_label[j] > tick)) |
11390 | { | |
11391 | if (replace) | |
38a448ca | 11392 | *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
230d793d RS |
11393 | return replace; |
11394 | } | |
11395 | ||
11396 | return 1; | |
11397 | } | |
9a893315 JW |
11398 | /* If this is a memory reference, make sure that there were |
11399 | no stores after it that might have clobbered the value. We don't | |
11400 | have alias info, so we assume any store invalidates it. */ | |
11401 | else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x) | |
11402 | && INSN_CUID (insn) <= mem_last_set) | |
11403 | { | |
11404 | if (replace) | |
38a448ca | 11405 | *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
9a893315 JW |
11406 | return replace; |
11407 | } | |
230d793d RS |
11408 | |
11409 | for (i = 0; i < len; i++) | |
11410 | if ((fmt[i] == 'e' | |
9a893315 | 11411 | && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0) |
230d793d RS |
11412 | /* Don't bother with these. They shouldn't occur anyway. */ |
11413 | || fmt[i] == 'E') | |
11414 | return 0; | |
11415 | ||
11416 | /* If we haven't found a reason for it to be invalid, it is valid. */ | |
11417 | return 1; | |
11418 | } | |
11419 | ||
11420 | /* Get the last value assigned to X, if known. Some registers | |
11421 | in the value may be replaced with (clobber (const_int 0)) if their value | |
11422 | is no longer known reliably. */ |
11423 | ||
11424 | static rtx | |
11425 | get_last_value (x) | |
11426 | rtx x; | |
11427 | { | |
770ae6cc | 11428 | unsigned int regno; |
230d793d RS |
11429 | rtx value; |
11430 | ||
11431 | /* If this is a non-paradoxical SUBREG, get the value of its operand and | |
11432 | then convert it to the desired mode. If this is a paradoxical SUBREG, | |
0f41302f | 11433 | we cannot predict what values the "extra" bits might have. */ |
230d793d RS |
11434 | if (GET_CODE (x) == SUBREG |
11435 | && subreg_lowpart_p (x) | |
11436 | && (GET_MODE_SIZE (GET_MODE (x)) | |
11437 | <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
11438 | && (value = get_last_value (SUBREG_REG (x))) != 0) | |
11439 | return gen_lowpart_for_combine (GET_MODE (x), value); | |
11440 | ||
11441 | if (GET_CODE (x) != REG) | |
11442 | return 0; | |
11443 | ||
11444 | regno = REGNO (x); | |
11445 | value = reg_last_set_value[regno]; | |
11446 | ||
57cf50a4 GRK |
11447 | /* If we don't have a value, or if it isn't for this basic block and |
11448 | it's either a hard register, set more than once, or it's live |
663522cb | 11449 | at the beginning of the function, return 0. |
57cf50a4 | 11450 | |
663522cb | 11451 | Because if it's not live at the beginning of the function then the reg |
57cf50a4 GRK |
11452 | is always set before being used (is never used without being set). |
11453 | And, if it's set only once, and it's always set before use, then all | |
11454 | uses must have the same last value, even if it's not from this basic | |
11455 | block. */ | |
230d793d RS |
11456 | |
11457 | if (value == 0 | |
57cf50a4 GRK |
11458 | || (reg_last_set_label[regno] != label_tick |
11459 | && (regno < FIRST_PSEUDO_REGISTER | |
11460 | || REG_N_SETS (regno) != 1 | |
770ae6cc RK |
11461 | || (REGNO_REG_SET_P |
11462 | (BASIC_BLOCK (0)->global_live_at_start, regno))))) | |
230d793d RS |
11463 | return 0; |
11464 | ||
4255220d | 11465 | /* If the value was set in a later insn than the ones we are processing, |
ca4cd906 | 11466 | we can't use it even if the register was only set once. */ |
bcd49eb7 | 11467 | if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid) |
ca4cd906 | 11468 | return 0; |
d0ab8cd3 RK |
11469 | |
11470 | /* If the value has all its registers valid, return it. */ | |
9a893315 JW |
11471 | if (get_last_value_validate (&value, reg_last_set[regno], |
11472 | reg_last_set_label[regno], 0)) | |
230d793d RS |
11473 | return value; |
11474 | ||
11475 | /* Otherwise, make a copy and replace any invalid register with | |
11476 | (clobber (const_int 0)). If that fails for some reason, return 0. */ | |
11477 | ||
11478 | value = copy_rtx (value); | |
9a893315 JW |
11479 | if (get_last_value_validate (&value, reg_last_set[regno], |
11480 | reg_last_set_label[regno], 1)) | |
230d793d RS |
11481 | return value; |
11482 | ||
11483 | return 0; | |
11484 | } | |
11485 | \f | |
11486 | /* Return nonzero if expression X refers to a REG or to memory | |
11487 | that is set in an instruction more recent than FROM_CUID. */ | |
11488 | ||
11489 | static int | |
11490 | use_crosses_set_p (x, from_cuid) | |
11491 | register rtx x; | |
11492 | int from_cuid; | |
11493 | { | |
6f7d635c | 11494 | register const char *fmt; |
230d793d RS |
11495 | register int i; |
11496 | register enum rtx_code code = GET_CODE (x); | |
11497 | ||
11498 | if (code == REG) | |
11499 | { | |
770ae6cc RK |
11500 | unsigned int regno = REGNO (x); |
11501 | unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER | |
663522cb KH |
11502 | ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1); |
11503 | ||
230d793d RS |
11504 | #ifdef PUSH_ROUNDING |
11505 | /* Don't allow uses of the stack pointer to be moved, | |
11506 | because we don't know whether the move crosses a push insn. */ | |
f73ad30e | 11507 | if (regno == STACK_POINTER_REGNUM && PUSH_ARGS) |
230d793d RS |
11508 | return 1; |
11509 | #endif | |
770ae6cc | 11510 | for (; regno < endreg; regno++) |
e28f5732 RK |
11511 | if (reg_last_set[regno] |
11512 | && INSN_CUID (reg_last_set[regno]) > from_cuid) | |
11513 | return 1; | |
11514 | return 0; | |
230d793d RS |
11515 | } |
11516 | ||
11517 | if (code == MEM && mem_last_set > from_cuid) | |
11518 | return 1; | |
11519 | ||
11520 | fmt = GET_RTX_FORMAT (code); | |
11521 | ||
11522 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
11523 | { | |
11524 | if (fmt[i] == 'E') | |
11525 | { | |
11526 | register int j; | |
11527 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
11528 | if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid)) | |
11529 | return 1; | |
11530 | } | |
11531 | else if (fmt[i] == 'e' | |
11532 | && use_crosses_set_p (XEXP (x, i), from_cuid)) | |
11533 | return 1; | |
11534 | } | |
11535 | return 0; | |
11536 | } | |
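/* A minimal, self-contained sketch (not part of combine.c) of the
   format-driven recursion used above: GCC walks an rtx by consulting
   GET_RTX_FORMAT, where 'e' marks a subexpression and 'E' marks a
   vector of subexpressions.  The toy type below is a hypothetical
   stand-in for rtx, with an explicit operand count instead of a
   format string.  */

struct toy_expr
{
  int is_reg;                   /* Leaf case, like code == REG.  */
  unsigned int regno;           /* Meaningful only when is_reg.  */
  int n_ops;                    /* Number of subexpressions.  */
  struct toy_expr *ops[4];      /* Like XEXP (x, i).  */
};

static int
toy_mentions_regno (x, regno)
     struct toy_expr *x;
     unsigned int regno;
{
  int i;

  if (x->is_reg)
    return x->regno == regno;

  /* Scan the operands last-to-first, as the loop above does.  */
  for (i = x->n_ops - 1; i >= 0; i--)
    if (x->ops[i] != 0 && toy_mentions_regno (x->ops[i], regno))
      return 1;

  return 0;
}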
11537 | \f | |
11538 | /* Define three variables used for communication between the following | |
11539 | routines. */ | |
11540 | ||
770ae6cc | 11541 | static unsigned int reg_dead_regno, reg_dead_endregno; |
230d793d RS |
11542 | static int reg_dead_flag; |
11543 | ||
11544 | /* Function called via note_stores from reg_dead_at_p. | |
11545 | ||
663522cb | 11546 | If DEST is within [reg_dead_regno, reg_dead_endregno), set |
230d793d RS |
11547 | reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
11548 | ||
11549 | static void | |
84832317 | 11550 | reg_dead_at_p_1 (dest, x, data) |
230d793d RS |
11551 | rtx dest; |
11552 | rtx x; | |
84832317 | 11553 | void *data ATTRIBUTE_UNUSED; |
230d793d | 11554 | { |
770ae6cc | 11555 | unsigned int regno, endregno; |
230d793d RS |
11556 | |
11557 | if (GET_CODE (dest) != REG) | |
11558 | return; | |
11559 | ||
11560 | regno = REGNO (dest); | |
663522cb | 11561 | endregno = regno + (regno < FIRST_PSEUDO_REGISTER |
230d793d RS |
11562 | ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1); |
11563 | ||
11564 | if (reg_dead_endregno > regno && reg_dead_regno < endregno) | |
11565 | reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1; | |
11566 | } | |
11567 | ||
11568 | /* Return nonzero if REG is known to be dead at INSN.
11569 | ||
11570 | We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER | |
11571 | referencing REG, it is dead. If we hit a SET referencing REG, it is | |
11572 | live. Otherwise, see if it is live or dead at the start of the basic | |
6e25d159 RK |
11573 | block we are in. Hard regs marked as being live in NEWPAT_USED_REGS |
11574 | must be assumed to be always live. */ | |
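/* For example (hypothetical register number): scanning backwards from
   INSN, hitting (clobber (reg:SI 3)) proves reg 3 dead, hitting a SET
   of it proves it live, and a REG_DEAD note for it proves it dead.  */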
230d793d RS |
11575 | |
11576 | static int | |
11577 | reg_dead_at_p (reg, insn) | |
11578 | rtx reg; | |
11579 | rtx insn; | |
11580 | { | |
770ae6cc RK |
11581 | int block; |
11582 | unsigned int i; | |
230d793d RS |
11583 | |
11584 | /* Set variables for reg_dead_at_p_1. */ | |
11585 | reg_dead_regno = REGNO (reg); | |
11586 | reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER | |
11587 | ? HARD_REGNO_NREGS (reg_dead_regno, | |
11588 | GET_MODE (reg)) | |
11589 | : 1); | |
11590 | ||
11591 | reg_dead_flag = 0; | |
11592 | ||
6e25d159 RK |
11593 | /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */ |
11594 | if (reg_dead_regno < FIRST_PSEUDO_REGISTER) | |
11595 | { | |
11596 | for (i = reg_dead_regno; i < reg_dead_endregno; i++) | |
11597 | if (TEST_HARD_REG_BIT (newpat_used_regs, i)) | |
11598 | return 0; | |
11599 | } | |
11600 | ||
230d793d RS |
11601 | /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or |
11602 | beginning of function. */ | |
60715d0b | 11603 | for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER; |
230d793d RS |
11604 | insn = prev_nonnote_insn (insn)) |
11605 | { | |
84832317 | 11606 | note_stores (PATTERN (insn), reg_dead_at_p_1, NULL); |
230d793d RS |
11607 | if (reg_dead_flag) |
11608 | return reg_dead_flag == 1 ? 1 : 0; | |
11609 | ||
11610 | if (find_regno_note (insn, REG_DEAD, reg_dead_regno)) | |
11611 | return 1; | |
11612 | } | |
11613 | ||
11614 | /* Get the basic block number that we were in. */ | |
11615 | if (insn == 0) | |
11616 | block = 0; | |
11617 | else | |
11618 | { | |
11619 | for (block = 0; block < n_basic_blocks; block++) | |
3b413743 | 11620 | if (insn == BLOCK_HEAD (block)) |
230d793d RS |
11621 | break; |
11622 | ||
11623 | if (block == n_basic_blocks) | |
11624 | return 0; | |
11625 | } | |
11626 | ||
11627 | for (i = reg_dead_regno; i < reg_dead_endregno; i++) | |
e881bb1b | 11628 | if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i)) |
230d793d RS |
11629 | return 0; |
11630 | ||
11631 | return 1; | |
11632 | } | |
6e25d159 RK |
11633 | \f |
11634 | /* Note hard registers in X that are used. This code is similar to | |
11635 | that in flow.c, but much simpler since we don't care about pseudos. */ | |
11636 | ||
11637 | static void | |
11638 | mark_used_regs_combine (x) | |
11639 | rtx x; | |
11640 | { | |
770ae6cc RK |
11641 | RTX_CODE code = GET_CODE (x); |
11642 | unsigned int regno; | |
6e25d159 RK |
11643 | int i; |
11644 | ||
11645 | switch (code) | |
11646 | { | |
11647 | case LABEL_REF: | |
11648 | case SYMBOL_REF: | |
11649 | case CONST_INT: | |
11650 | case CONST: | |
11651 | case CONST_DOUBLE: | |
11652 | case PC: | |
11653 | case ADDR_VEC: | |
11654 | case ADDR_DIFF_VEC: | |
11655 | case ASM_INPUT: | |
11656 | #ifdef HAVE_cc0 | |
11657 | /* CC0 must die in the insn after it is set, so we don't need to take | |
11658 | special note of it here. */ | |
11659 | case CC0: | |
11660 | #endif | |
11661 | return; | |
11662 | ||
11663 | case CLOBBER: | |
11664 | /* If we are clobbering a MEM, mark any hard registers inside the | |
11665 | address as used. */ | |
11666 | if (GET_CODE (XEXP (x, 0)) == MEM) | |
11667 | mark_used_regs_combine (XEXP (XEXP (x, 0), 0)); | |
11668 | return; | |
11669 | ||
11670 | case REG: | |
11671 | regno = REGNO (x); | |
11672 | /* A hard reg in a wide mode may really be multiple registers. | |
11673 | If so, mark all of them just like the first. */ | |
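/* For example (hypothetical numbering): on a typical 32-bit target, a
   DImode value in hard reg 10 also occupies hard reg 11, so both bits
   get set in newpat_used_regs below.  */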
11674 | if (regno < FIRST_PSEUDO_REGISTER) | |
11675 | { | |
770ae6cc RK |
11676 | unsigned int endregno, r; |
11677 | ||
6e25d159 RK |
11678 | /* None of this applies to the stack, frame or arg pointers.  */
11679 | if (regno == STACK_POINTER_REGNUM | |
11680 | #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM | |
11681 | || regno == HARD_FRAME_POINTER_REGNUM | |
11682 | #endif | |
11683 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
11684 | || (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) | |
11685 | #endif | |
11686 | || regno == FRAME_POINTER_REGNUM) | |
11687 | return; | |
11688 | ||
770ae6cc RK |
11689 | endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); |
11690 | for (r = regno; r < endregno; r++) | |
11691 | SET_HARD_REG_BIT (newpat_used_regs, r); | |
6e25d159 RK |
11692 | } |
11693 | return; | |
11694 | ||
11695 | case SET: | |
11696 | { | |
11697 | /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in | |
11698 | the address. */ | |
11699 | register rtx testreg = SET_DEST (x); | |
11700 | ||
e048778f RK |
11701 | while (GET_CODE (testreg) == SUBREG |
11702 | || GET_CODE (testreg) == ZERO_EXTRACT | |
11703 | || GET_CODE (testreg) == SIGN_EXTRACT | |
11704 | || GET_CODE (testreg) == STRICT_LOW_PART) | |
6e25d159 RK |
11705 | testreg = XEXP (testreg, 0); |
11706 | ||
11707 | if (GET_CODE (testreg) == MEM) | |
11708 | mark_used_regs_combine (XEXP (testreg, 0)); | |
11709 | ||
11710 | mark_used_regs_combine (SET_SRC (x)); | |
6e25d159 | 11711 | } |
e9a25f70 JL |
11712 | return; |
11713 | ||
11714 | default: | |
11715 | break; | |
6e25d159 RK |
11716 | } |
11717 | ||
11718 | /* Recursively scan the operands of this expression. */ | |
11719 | ||
11720 | { | |
6f7d635c | 11721 | register const char *fmt = GET_RTX_FORMAT (code); |
6e25d159 RK |
11722 | |
11723 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
11724 | { | |
663522cb | 11725 | if (fmt[i] == 'e') |
6e25d159 | 11726 | mark_used_regs_combine (XEXP (x, i)); |
663522cb KH |
11727 | else if (fmt[i] == 'E') |
11728 | { | |
11729 | register int j; | |
6e25d159 | 11730 | |
663522cb KH |
11731 | for (j = 0; j < XVECLEN (x, i); j++) |
11732 | mark_used_regs_combine (XVECEXP (x, i, j)); | |
11733 | } | |
6e25d159 RK |
11734 | } |
11735 | } | |
11736 | } | |
230d793d RS |
11737 | \f |
11738 | /* Remove register number REGNO from the dead registers list of INSN. | |
11739 | ||
11740 | Return the note used to record the death, if there was one. */ | |
11741 | ||
11742 | rtx | |
11743 | remove_death (regno, insn) | |
770ae6cc | 11744 | unsigned int regno; |
230d793d RS |
11745 | rtx insn; |
11746 | { | |
11747 | register rtx note = find_regno_note (insn, REG_DEAD, regno); | |
11748 | ||
11749 | if (note) | |
1a26b032 | 11750 | { |
b1f21e0a | 11751 | REG_N_DEATHS (regno)--; |
1a26b032 RK |
11752 | remove_note (insn, note); |
11753 | } | |
230d793d RS |
11754 | |
11755 | return note; | |
11756 | } | |
11757 | ||
11758 | /* For each register (hardware or pseudo) used within expression X, if its | |
11759 | death is in an instruction with cuid between FROM_CUID (inclusive) and | |
11760 | TO_INSN (exclusive), put a REG_DEAD note for that register in the | |
663522cb | 11761 | list headed by PNOTES. |
230d793d | 11762 | |
6eb12cef RK |
11763 | That said, don't move registers killed by maybe_kill_insn. |
11764 | ||
230d793d RS |
11765 | This is done when X is being merged by combination into TO_INSN. These |
11766 | notes will then be distributed as needed. */ | |
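/* For example (hypothetical pseudo number): if (reg:SI 70) is used in
   X and its recorded death lies between FROM_CUID and TO_INSN, the old
   REG_DEAD note is removed and a fresh one is queued on *PNOTES for
   later distribution.  */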
11767 | ||
11768 | static void | |
6eb12cef | 11769 | move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes) |
230d793d | 11770 | rtx x; |
6eb12cef | 11771 | rtx maybe_kill_insn; |
230d793d RS |
11772 | int from_cuid; |
11773 | rtx to_insn; | |
11774 | rtx *pnotes; | |
11775 | { | |
6f7d635c | 11776 | register const char *fmt; |
230d793d RS |
11777 | register int len, i; |
11778 | register enum rtx_code code = GET_CODE (x); | |
11779 | ||
11780 | if (code == REG) | |
11781 | { | |
770ae6cc | 11782 | unsigned int regno = REGNO (x); |
230d793d | 11783 | register rtx where_dead = reg_last_death[regno]; |
e340018d JW |
11784 | register rtx before_dead, after_dead; |
11785 | ||
6eb12cef RK |
11786 | /* Don't move the register if it gets killed between FROM_CUID and TO_INSN.  */
11787 | if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn) | |
770ae6cc | 11788 | && ! reg_referenced_p (x, maybe_kill_insn)) |
6eb12cef RK |
11789 | return; |
11790 | ||
e340018d JW |
11791 | /* WHERE_DEAD could be a USE insn made by combine, so first we |
11792 | make sure that we have insns with valid INSN_CUID values. */ | |
11793 | before_dead = where_dead; | |
11794 | while (before_dead && INSN_UID (before_dead) > max_uid_cuid) | |
11795 | before_dead = PREV_INSN (before_dead); | |
770ae6cc | 11796 | |
e340018d JW |
11797 | after_dead = where_dead; |
11798 | while (after_dead && INSN_UID (after_dead) > max_uid_cuid) | |
11799 | after_dead = NEXT_INSN (after_dead); | |
11800 | ||
11801 | if (before_dead && after_dead | |
11802 | && INSN_CUID (before_dead) >= from_cuid | |
11803 | && (INSN_CUID (after_dead) < INSN_CUID (to_insn) | |
11804 | || (where_dead != after_dead | |
11805 | && INSN_CUID (after_dead) == INSN_CUID (to_insn)))) | |
230d793d | 11806 | { |
dbc131f3 | 11807 | rtx note = remove_death (regno, where_dead); |
230d793d RS |
11808 | |
11809 | /* It is possible for the call above to return 0. This can occur | |
11810 | when reg_last_death points to I2 or I1, which we combined with.
dbc131f3 RK |
11811 | In that case make a new note. |
11812 | ||
11813 | We must also check for the case where X is a hard register | |
11814 | and NOTE is a death note for a range of hard registers | |
11815 | including X. In that case, we must put REG_DEAD notes for | |
11816 | the remaining registers in place of NOTE. */ | |
11817 | ||
11818 | if (note != 0 && regno < FIRST_PSEUDO_REGISTER | |
11819 | && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0))) | |
24e46fc4 | 11820 | > GET_MODE_SIZE (GET_MODE (x)))) |
dbc131f3 | 11821 | { |
770ae6cc RK |
11822 | unsigned int deadregno = REGNO (XEXP (note, 0)); |
11823 | unsigned int deadend | |
dbc131f3 RK |
11824 | = (deadregno + HARD_REGNO_NREGS (deadregno, |
11825 | GET_MODE (XEXP (note, 0)))); | |
770ae6cc RK |
11826 | unsigned int ourend |
11827 | = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
11828 | unsigned int i; | |
dbc131f3 RK |
11829 | |
11830 | for (i = deadregno; i < deadend; i++) | |
11831 | if (i < regno || i >= ourend) | |
11832 | REG_NOTES (where_dead) | |
38a448ca RH |
11833 | = gen_rtx_EXPR_LIST (REG_DEAD, |
11834 | gen_rtx_REG (reg_raw_mode[i], i), | |
11835 | REG_NOTES (where_dead)); | |
dbc131f3 | 11836 | } |
770ae6cc | 11837 | |
24e46fc4 JW |
11838 | /* If we didn't find any note, or if we found a REG_DEAD note that |
11839 | covers only part of the given reg, and we have a multi-reg hard | |
fabd69e8 RK |
11840 | register, then to be safe we must check for REG_DEAD notes |
11841 | for each register other than the first. They could have | |
11842 | their own REG_DEAD notes lying around. */ | |
24e46fc4 JW |
11843 | else if ((note == 0 |
11844 | || (note != 0 | |
11845 | && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0))) | |
11846 | < GET_MODE_SIZE (GET_MODE (x))))) | |
11847 | && regno < FIRST_PSEUDO_REGISTER | |
fabd69e8 RK |
11848 | && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1) |
11849 | { | |
770ae6cc RK |
11850 | unsigned int ourend |
11851 | = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
11852 | unsigned int i, offset; | |
fabd69e8 RK |
11853 | rtx oldnotes = 0; |
11854 | ||
24e46fc4 JW |
11855 | if (note) |
11856 | offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))); | |
11857 | else | |
11858 | offset = 1; | |
11859 | ||
11860 | for (i = regno + offset; i < ourend; i++) | |
38a448ca | 11861 | move_deaths (gen_rtx_REG (reg_raw_mode[i], i), |
6eb12cef | 11862 | maybe_kill_insn, from_cuid, to_insn, &oldnotes); |
fabd69e8 | 11863 | } |
230d793d | 11864 | |
dbc131f3 | 11865 | if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x)) |
230d793d RS |
11866 | { |
11867 | XEXP (note, 1) = *pnotes; | |
11868 | *pnotes = note; | |
11869 | } | |
11870 | else | |
38a448ca | 11871 | *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes); |
1a26b032 | 11872 | |
b1f21e0a | 11873 | REG_N_DEATHS (regno)++; |
230d793d RS |
11874 | } |
11875 | ||
11876 | return; | |
11877 | } | |
11878 | ||
11879 | else if (GET_CODE (x) == SET) | |
11880 | { | |
11881 | rtx dest = SET_DEST (x); | |
11882 | ||
6eb12cef | 11883 | move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes); |
230d793d | 11884 | |
a7c99304 RK |
11885 | /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG |
11886 | that accesses one word of a multi-word item, some | |
11887 | piece of every register in the expression is used by
11888 | this insn, so remove any old death. */ | |
11889 | ||
11890 | if (GET_CODE (dest) == ZERO_EXTRACT | |
11891 | || GET_CODE (dest) == STRICT_LOW_PART | |
11892 | || (GET_CODE (dest) == SUBREG | |
11893 | && (((GET_MODE_SIZE (GET_MODE (dest)) | |
11894 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD) | |
11895 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
11896 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))) | |
230d793d | 11897 | { |
6eb12cef | 11898 | move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes); |
a7c99304 | 11899 | return; |
230d793d RS |
11900 | } |
11901 | ||
a7c99304 RK |
11902 | /* If this is some other SUBREG, we know it replaces the entire |
11903 | value, so use that as the destination. */ | |
11904 | if (GET_CODE (dest) == SUBREG) | |
11905 | dest = SUBREG_REG (dest); | |
11906 | ||
11907 | /* If this is a MEM, adjust deaths of anything used in the address. | |
11908 | For a REG (the only other possibility), the entire value is | |
11909 | being replaced so the old value is not used in this insn. */ | |
230d793d RS |
11910 | |
11911 | if (GET_CODE (dest) == MEM) | |
6eb12cef RK |
11912 | move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid, |
11913 | to_insn, pnotes); | |
230d793d RS |
11914 | return; |
11915 | } | |
11916 | ||
11917 | else if (GET_CODE (x) == CLOBBER) | |
11918 | return; | |
11919 | ||
11920 | len = GET_RTX_LENGTH (code); | |
11921 | fmt = GET_RTX_FORMAT (code); | |
11922 | ||
11923 | for (i = 0; i < len; i++) | |
11924 | { | |
11925 | if (fmt[i] == 'E') | |
11926 | { | |
11927 | register int j; | |
11928 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
6eb12cef RK |
11929 | move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid, |
11930 | to_insn, pnotes); | |
230d793d RS |
11931 | } |
11932 | else if (fmt[i] == 'e') | |
6eb12cef | 11933 | move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes); |
230d793d RS |
11934 | } |
11935 | } | |
11936 | \f | |
a7c99304 RK |
11937 | /* Return 1 if X is the target of a bit-field assignment in BODY, the |
11938 | pattern of an insn. X must be a REG. */ | |
230d793d RS |
11939 | |
11940 | static int | |
a7c99304 RK |
11941 | reg_bitfield_target_p (x, body) |
11942 | rtx x; | |
230d793d RS |
11943 | rtx body; |
11944 | { | |
11945 | int i; | |
11946 | ||
11947 | if (GET_CODE (body) == SET) | |
a7c99304 RK |
11948 | { |
11949 | rtx dest = SET_DEST (body); | |
11950 | rtx target; | |
770ae6cc | 11951 | unsigned int regno, tregno, endregno, endtregno; |
a7c99304 RK |
11952 | |
11953 | if (GET_CODE (dest) == ZERO_EXTRACT) | |
11954 | target = XEXP (dest, 0); | |
11955 | else if (GET_CODE (dest) == STRICT_LOW_PART) | |
11956 | target = SUBREG_REG (XEXP (dest, 0)); | |
11957 | else | |
11958 | return 0; | |
11959 | ||
11960 | if (GET_CODE (target) == SUBREG) | |
11961 | target = SUBREG_REG (target); | |
11962 | ||
11963 | if (GET_CODE (target) != REG) | |
11964 | return 0; | |
11965 | ||
11966 | tregno = REGNO (target), regno = REGNO (x); | |
11967 | if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER) | |
11968 | return target == x; | |
11969 | ||
11970 | endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target)); | |
11971 | endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
11972 | ||
11973 | return endregno > tregno && regno < endtregno; | |
11974 | } | |
230d793d RS |
11975 | |
11976 | else if (GET_CODE (body) == PARALLEL) | |
11977 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
a7c99304 | 11978 | if (reg_bitfield_target_p (x, XVECEXP (body, 0, i))) |
230d793d RS |
11979 | return 1; |
11980 | ||
11981 | return 0; | |
663522cb | 11982 | } |
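/* A hedged worked example (not part of combine.c) of the overlap test
   above: two half-open hard-register ranges [regno, endregno) and
   [tregno, endtregno) overlap exactly when each starts before the
   other ends.  */

static int
toy_ranges_overlap (start1, end1, start2, end2)
     unsigned int start1, end1, start2, end2;
{
  /* E.g. (hypothetical numbering) a DImode target in regs 2 and 3,
     i.e. [2, 4), overlaps an SImode reg 3, i.e. [3, 4), because
     4 > 3 and 2 < 4.  */
  return end1 > start2 && start1 < end2;
}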
230d793d RS |
11983 | \f |
11984 | /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them | |
11985 | as appropriate. I3 and I2 are the insns resulting from the combination | |
11986 | insns including FROM (I2 may be zero). | |
11987 | ||
11988 | ELIM_I2 and ELIM_I1 are either zero or registers that we know will | |
11989 | not need REG_DEAD notes because they are being substituted for. This | |
11990 | saves searching in the most common cases. | |
11991 | ||
11992 | Each note in the list is either ignored or placed on some insns, depending | |
11993 | on the type of note. */ | |
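/* For example: a REG_DEAD note taken from I2 may end up on I3 if the
   register now dies there, move to an earlier insn found by scanning
   backwards, or be dropped entirely if the register no longer appears
   anywhere.  */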
11994 | ||
11995 | static void | |
11996 | distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1) | |
11997 | rtx notes; | |
11998 | rtx from_insn; | |
11999 | rtx i3, i2; | |
12000 | rtx elim_i2, elim_i1; | |
12001 | { | |
12002 | rtx note, next_note; | |
12003 | rtx tem; | |
12004 | ||
12005 | for (note = notes; note; note = next_note) | |
12006 | { | |
12007 | rtx place = 0, place2 = 0; | |
12008 | ||
12009 | /* If this NOTE references a pseudo register, ensure it references | |
12010 | the latest copy of that register. */ | |
12011 | if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG | |
12012 | && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER) | |
12013 | XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))]; | |
12014 | ||
12015 | next_note = XEXP (note, 1); | |
12016 | switch (REG_NOTE_KIND (note)) | |
12017 | { | |
c9903b44 DE |
12018 | case REG_BR_PROB: |
12019 | case REG_EXEC_COUNT: | |
12020 | /* Doesn't matter much where we put this, as long as it's somewhere. | |
12021 | It is preferable to keep these notes on branches, which is most | |
12022 | likely to be i3. */ | |
12023 | place = i3; | |
12024 | break; | |
12025 | ||
4b7c585f | 12026 | case REG_EH_REGION: |
0e403ec3 AS |
12027 | case REG_EH_RETHROW: |
12028 | /* These notes must remain with the call. It should not be | |
12029 | possible for both I2 and I3 to be a call. */ | |
663522cb | 12030 | if (GET_CODE (i3) == CALL_INSN) |
4b7c585f JL |
12031 | place = i3; |
12032 | else if (i2 && GET_CODE (i2) == CALL_INSN) | |
12033 | place = i2; | |
12034 | else | |
12035 | abort (); | |
12036 | break; | |
12037 | ||
230d793d | 12038 | case REG_UNUSED: |
07d0cbdd | 12039 | /* Any clobbers for i3 may still exist, and so we must process |
176c9e6b JW |
12040 | REG_UNUSED notes from that insn. |
12041 | ||
12042 | Any clobbers from i2 or i1 can only exist if they were added by | |
12043 | recog_for_combine. In that case, recog_for_combine created the | |
12044 | necessary REG_UNUSED notes. Trying to keep any original | |
12045 | REG_UNUSED notes from these insns can cause incorrect output | |
12046 | if it is for the same register as the original i3 dest. | |
12047 | In that case, we will notice that the register is set in i3, | |
12048 | and then add a REG_UNUSED note for the destination of i3, which | |
07d0cbdd JW |
12049 | is wrong. However, it is possible to have REG_UNUSED notes from |
12050 | i2 or i1 for registers that were both used and clobbered, so
12051 | we keep notes from i2 or i1 if they will turn into REG_DEAD | |
12052 | notes. */ | |
176c9e6b | 12053 | |
230d793d RS |
12054 | /* If this register is set or clobbered in I3, put the note there |
12055 | unless there is one already. */ | |
07d0cbdd | 12056 | if (reg_set_p (XEXP (note, 0), PATTERN (i3))) |
230d793d | 12057 | { |
07d0cbdd JW |
12058 | if (from_insn != i3) |
12059 | break; | |
12060 | ||
230d793d RS |
12061 | if (! (GET_CODE (XEXP (note, 0)) == REG |
12062 | ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0))) | |
12063 | : find_reg_note (i3, REG_UNUSED, XEXP (note, 0)))) | |
12064 | place = i3; | |
12065 | } | |
12066 | /* Otherwise, if this register is used by I3, then this register | |
12067 | now dies here, so we must put a REG_DEAD note here unless there | |
12068 | is one already. */ | |
12069 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)) | |
12070 | && ! (GET_CODE (XEXP (note, 0)) == REG | |
770ae6cc RK |
12071 | ? find_regno_note (i3, REG_DEAD, |
12072 | REGNO (XEXP (note, 0))) | |
230d793d RS |
12073 | : find_reg_note (i3, REG_DEAD, XEXP (note, 0)))) |
12074 | { | |
12075 | PUT_REG_NOTE_KIND (note, REG_DEAD); | |
12076 | place = i3; | |
12077 | } | |
12078 | break; | |
12079 | ||
12080 | case REG_EQUAL: | |
12081 | case REG_EQUIV: | |
12082 | case REG_NONNEG: | |
9ae8ffe7 | 12083 | case REG_NOALIAS: |
230d793d RS |
12084 | /* These notes say something about results of an insn. We can |
12085 | only support them if they used to be on I3 in which case they | |
a687e897 RK |
12086 | remain on I3. Otherwise they are ignored. |
12087 | ||
12088 | If the note refers to an expression that is not a constant, we | |
12089 | must also ignore the note since we cannot tell whether the | |
12090 | equivalence is still true. It might be possible to do | |
12091 | slightly better than this (we only have a problem if I2DEST | |
12092 | or I1DEST is present in the expression), but it doesn't | |
12093 | seem worth the trouble. */ | |
12094 | ||
12095 | if (from_insn == i3 | |
12096 | && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0)))) | |
230d793d RS |
12097 | place = i3; |
12098 | break; | |
12099 | ||
12100 | case REG_INC: | |
12101 | case REG_NO_CONFLICT: | |
230d793d RS |
12102 | /* These notes say something about how a register is used. They must |
12103 | be present on any use of the register in I2 or I3. */ | |
12104 | if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))) | |
12105 | place = i3; | |
12106 | ||
12107 | if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2))) | |
12108 | { | |
12109 | if (place) | |
12110 | place2 = i2; | |
12111 | else | |
12112 | place = i2; | |
12113 | } | |
12114 | break; | |
12115 | ||
e55b4486 RH |
12116 | case REG_LABEL: |
12117 | /* This can show up in several ways -- either directly in the | |
12118 | pattern, or hidden off in the constant pool with (or without?) | |
12119 | a REG_EQUAL note. */ | |
12120 | /* ??? Ignore the without-reg_equal-note problem for now. */ | |
12121 | if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)) | |
12122 | || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX)) | |
12123 | && GET_CODE (XEXP (tem, 0)) == LABEL_REF | |
12124 | && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))) | |
12125 | place = i3; | |
12126 | ||
12127 | if (i2 | |
12128 | && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2)) | |
663522cb | 12129 | || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX)) |
e55b4486 RH |
12130 | && GET_CODE (XEXP (tem, 0)) == LABEL_REF |
12131 | && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))) | |
12132 | { | |
12133 | if (place) | |
12134 | place2 = i2; | |
12135 | else | |
12136 | place = i2; | |
12137 | } | |
12138 | break; | |
12139 | ||
230d793d RS |
12140 | case REG_WAS_0: |
12141 | /* It is too much trouble to try to see if this note is still | |
12142 | correct in all situations. It is better to simply delete it. */ | |
12143 | break; | |
12144 | ||
12145 | case REG_RETVAL: | |
12146 | /* If the insn previously containing this note still exists, | |
12147 | put it back where it was. Otherwise move it to the previous | |
12148 | insn. Adjust the corresponding REG_LIBCALL note. */ | |
12149 | if (GET_CODE (from_insn) != NOTE) | |
12150 | place = from_insn; | |
12151 | else | |
12152 | { | |
5f4f0e22 | 12153 | tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX); |
230d793d RS |
12154 | place = prev_real_insn (from_insn); |
12155 | if (tem && place) | |
12156 | XEXP (tem, 0) = place; | |
12157 | } | |
12158 | break; | |
12159 | ||
12160 | case REG_LIBCALL: | |
12161 | /* This is handled similarly to REG_RETVAL. */ | |
12162 | if (GET_CODE (from_insn) != NOTE) | |
12163 | place = from_insn; | |
12164 | else | |
12165 | { | |
5f4f0e22 | 12166 | tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX); |
230d793d RS |
12167 | place = next_real_insn (from_insn); |
12168 | if (tem && place) | |
12169 | XEXP (tem, 0) = place; | |
12170 | } | |
12171 | break; | |
12172 | ||
12173 | case REG_DEAD: | |
12174 | /* If the register is used as an input in I3, it dies there. | |
12175 | Similarly for I2, if it is nonzero and adjacent to I3.
12176 | ||
12177 | If the register is not used as an input in either I3 or I2 | |
12178 | and it is not one of the registers we were supposed to eliminate, | |
12179 | there are two possibilities. We might have a non-adjacent I2 | |
12180 | or we might have somehow eliminated an additional register | |
12181 | from a computation. For example, we might have had A & B where | |
12182 | we discover that B will always be zero. In this case we will | |
12183 | eliminate the reference to A. | |
12184 | ||
12185 | In both cases, we must search to see if we can find a previous | |
12186 | use of A and put the death note there. */ | |
12187 | ||
6e2d1486 RK |
12188 | if (from_insn |
12189 | && GET_CODE (from_insn) == CALL_INSN | |
663522cb | 12190 | && find_reg_fusage (from_insn, USE, XEXP (note, 0))) |
6e2d1486 RK |
12191 | place = from_insn; |
12192 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))) | |
230d793d RS |
12193 | place = i3; |
12194 | else if (i2 != 0 && next_nonnote_insn (i2) == i3 | |
12195 | && reg_referenced_p (XEXP (note, 0), PATTERN (i2))) | |
12196 | place = i2; | |
12197 | ||
12198 | if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1) | |
12199 | break; | |
12200 | ||
12201 | if (place == 0) | |
38d8473f | 12202 | { |
d3a923ee RH |
12203 | basic_block bb = BASIC_BLOCK (this_basic_block); |
12204 | ||
12205 | for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem)) | |
38d8473f | 12206 | { |
d3a923ee RH |
12207 | if (GET_RTX_CLASS (GET_CODE (tem)) != 'i') |
12208 | { | |
12209 | if (tem == bb->head) | |
12210 | break; | |
12211 | continue; | |
12212 | } | |
12213 | ||
38d8473f RK |
12214 | /* If the register is being set at TEM, see if that is all |
12215 | TEM is doing. If so, delete TEM. Otherwise, make this | |
12216 | into a REG_UNUSED note instead. */ | |
12217 | if (reg_set_p (XEXP (note, 0), PATTERN (tem))) | |
12218 | { | |
12219 | rtx set = single_set (tem); | |
e5e809f4 | 12220 | rtx inner_dest = 0; |
e51712db | 12221 | #ifdef HAVE_cc0 |
f5c97640 | 12222 | rtx cc0_setter = NULL_RTX; |
e51712db | 12223 | #endif |
e5e809f4 JL |
12224 | |
12225 | if (set != 0) | |
12226 | for (inner_dest = SET_DEST (set); | |
663522cb KH |
12227 | (GET_CODE (inner_dest) == STRICT_LOW_PART |
12228 | || GET_CODE (inner_dest) == SUBREG | |
12229 | || GET_CODE (inner_dest) == ZERO_EXTRACT); | |
e5e809f4 JL |
12230 | inner_dest = XEXP (inner_dest, 0)) |
12231 | ; | |
38d8473f RK |
12232 | |
12233 | /* Verify that it was the set, and not a clobber that | |
663522cb | 12234 | modified the register. |
f5c97640 RH |
12235 | |
12236 | CC0 targets must be careful to maintain setter/user | |
12237 | pairs. If we cannot delete the setter due to side | |
12238 | effects, mark the user with an UNUSED note instead | |
12239 | of deleting it. */ | |
38d8473f RK |
12240 | |
12241 | if (set != 0 && ! side_effects_p (SET_SRC (set)) | |
f5c97640 RH |
12242 | && rtx_equal_p (XEXP (note, 0), inner_dest) |
12243 | #ifdef HAVE_cc0 | |
12244 | && (! reg_mentioned_p (cc0_rtx, SET_SRC (set)) | |
12245 | || ((cc0_setter = prev_cc0_setter (tem)) != NULL | |
12246 | && sets_cc0_p (PATTERN (cc0_setter)) > 0)) | |
12247 | #endif | |
12248 | ) | |
38d8473f RK |
12249 | { |
12250 | /* Move the notes and links of TEM elsewhere. | |
663522cb | 12251 | This might delete other dead insns recursively. |
38d8473f RK |
12252 | First set the pattern to something that won't use |
12253 | any register. */ | |
12254 | ||
12255 | PATTERN (tem) = pc_rtx; | |
12256 | ||
12257 | distribute_notes (REG_NOTES (tem), tem, tem, | |
12258 | NULL_RTX, NULL_RTX, NULL_RTX); | |
12259 | distribute_links (LOG_LINKS (tem)); | |
12260 | ||
12261 | PUT_CODE (tem, NOTE); | |
12262 | NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED; | |
12263 | NOTE_SOURCE_FILE (tem) = 0; | |
f5c97640 RH |
12264 | |
12265 | #ifdef HAVE_cc0 | |
12266 | /* Delete the setter too. */ | |
12267 | if (cc0_setter) | |
12268 | { | |
12269 | PATTERN (cc0_setter) = pc_rtx; | |
12270 | ||
12271 | distribute_notes (REG_NOTES (cc0_setter), | |
12272 | cc0_setter, cc0_setter, | |
12273 | NULL_RTX, NULL_RTX, NULL_RTX); | |
12274 | distribute_links (LOG_LINKS (cc0_setter)); | |
12275 | ||
12276 | PUT_CODE (cc0_setter, NOTE); | |
d3a923ee RH |
12277 | NOTE_LINE_NUMBER (cc0_setter) |
12278 | = NOTE_INSN_DELETED; | |
f5c97640 RH |
12279 | NOTE_SOURCE_FILE (cc0_setter) = 0; |
12280 | } | |
12281 | #endif | |
38d8473f | 12282 | } |
e5e809f4 JL |
12283 | /* If the register is both set and used here, put the |
12284 | REG_DEAD note here, but place a REG_UNUSED note | |
12285 | here too unless there already is one. */ | |
12286 | else if (reg_referenced_p (XEXP (note, 0), | |
12287 | PATTERN (tem))) | |
12288 | { | |
12289 | place = tem; | |
12290 | ||
12291 | if (! find_regno_note (tem, REG_UNUSED, | |
12292 | REGNO (XEXP (note, 0)))) | |
12293 | REG_NOTES (tem) | |
c5c76735 | 12294 | = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0), |
9e6a5703 | 12295 | REG_NOTES (tem)); |
e5e809f4 | 12296 | } |
38d8473f RK |
12297 | else |
12298 | { | |
12299 | PUT_REG_NOTE_KIND (note, REG_UNUSED); | |
663522cb | 12300 | |
38d8473f RK |
12301 | /* If there isn't already a REG_UNUSED note, put one |
12302 | here. */ | |
12303 | if (! find_regno_note (tem, REG_UNUSED, | |
12304 | REGNO (XEXP (note, 0)))) | |
12305 | place = tem; | |
12306 | break; | |
d3a923ee RH |
12307 | } |
12308 | } | |
12309 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)) | |
12310 | || (GET_CODE (tem) == CALL_INSN | |
12311 | && find_reg_fusage (tem, USE, XEXP (note, 0)))) | |
12312 | { | |
12313 | place = tem; | |
12314 | ||
12315 | /* If we are doing a 3->2 combination, and we have a | |
12316 | register which formerly died in i3 and was not used | |
12317 | by i2, which now no longer dies in i3 and is used in | |
12318 | i2 but does not die in i2, and place is between i2 | |
12319 | and i3, then we may need to move a link from place to | |
12320 | i2. */ | |
12321 | if (i2 && INSN_UID (place) <= max_uid_cuid | |
12322 | && INSN_CUID (place) > INSN_CUID (i2) | |
663522cb KH |
12323 | && from_insn |
12324 | && INSN_CUID (from_insn) > INSN_CUID (i2) | |
d3a923ee RH |
12325 | && reg_referenced_p (XEXP (note, 0), PATTERN (i2))) |
12326 | { | |
12327 | rtx links = LOG_LINKS (place); | |
12328 | LOG_LINKS (place) = 0; | |
12329 | distribute_links (links); | |
12330 | } | |
12331 | break; | |
12332 | } | |
12333 | ||
12334 | if (tem == bb->head) | |
230d793d | 12335 | break; |
38d8473f | 12336 | } |
663522cb | 12337 | |
d3a923ee RH |
12338 | /* We haven't found an insn for the death note and it |
12339 | is still a REG_DEAD note, but we have hit the beginning | |
12340 | of the block. If the existing life info says the reg | |
715e7fbc RH |
12341 | was dead, there's nothing left to do. Otherwise, we'll |
12342 | need to do a global life update after combine. */ | |
770ae6cc RK |
12343 | if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 |
12344 | && REGNO_REG_SET_P (bb->global_live_at_start, | |
12345 | REGNO (XEXP (note, 0)))) | |
e2cce0cf | 12346 | { |
770ae6cc RK |
12347 | SET_BIT (refresh_blocks, this_basic_block); |
12348 | need_refresh = 1; | |
e2cce0cf | 12349 | } |
38d8473f | 12350 | } |
230d793d RS |
12351 | |
12352 | /* If the register is set or already dead at PLACE, we needn't do | |
e5e809f4 JL |
12353 | anything with this note if it is still a REG_DEAD note. |
12354 | We check here whether it is set at all, not whether it is totally
12355 | replaced, which is what `dead_or_set_p' checks, so we also check
12356 | for it being set partially. */
12357 | ||
230d793d RS |
12358 | if (place && REG_NOTE_KIND (note) == REG_DEAD) |
12359 | { | |
770ae6cc | 12360 | unsigned int regno = REGNO (XEXP (note, 0)); |
230d793d RS |
12361 | |
12362 | if (dead_or_set_p (place, XEXP (note, 0)) | |
12363 | || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place))) | |
12364 | { | |
12365 | /* Unless the register previously died in PLACE, clear | |
12366 | reg_last_death. [I no longer understand why this is | |
12367 | being done.] */ | |
12368 | if (reg_last_death[regno] != place) | |
12369 | reg_last_death[regno] = 0; | |
12370 | place = 0; | |
12371 | } | |
12372 | else | |
12373 | reg_last_death[regno] = place; | |
12374 | ||
12375 | /* If this is a death note for a hard reg that is occupying | |
12376 | multiple registers, ensure that we are still using all | |
12377 | parts of the object. If we find a piece of the object | |
12378 | that is unused, we must add a USE for that piece before | |
12379 | PLACE and put the appropriate REG_DEAD note on it. | |
12380 | ||
12381 | An alternative would be to put a REG_UNUSED for the pieces | |
12382 | on the insn that set the register, but that can't be done if | |
12383 | it is not in the same block. It is simpler, though less | |
12384 | efficient, to add the USE insns. */ | |
12385 | ||
12386 | if (place && regno < FIRST_PSEUDO_REGISTER | |
12387 | && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1) | |
12388 | { | |
770ae6cc | 12389 | unsigned int endregno |
230d793d RS |
12390 | = regno + HARD_REGNO_NREGS (regno, |
12391 | GET_MODE (XEXP (note, 0))); | |
12392 | int all_used = 1; | |
770ae6cc | 12393 | unsigned int i; |
230d793d RS |
12394 | |
12395 | for (i = regno; i < endregno; i++) | |
9fd5bb62 JW |
12396 | if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0) |
12397 | && ! find_regno_fusage (place, USE, i)) | |
230d793d | 12398 | { |
38a448ca | 12399 | rtx piece = gen_rtx_REG (reg_raw_mode[i], i); |
28f6d3af RK |
12400 | rtx p; |
12401 | ||
12402 | /* See if we already placed a USE note for this | |
12403 | register in front of PLACE. */ | |
12404 | for (p = place; | |
12405 | GET_CODE (PREV_INSN (p)) == INSN | |
12406 | && GET_CODE (PATTERN (PREV_INSN (p))) == USE; | |
12407 | p = PREV_INSN (p)) | |
12408 | if (rtx_equal_p (piece, | |
12409 | XEXP (PATTERN (PREV_INSN (p)), 0))) | |
12410 | { | |
12411 | p = 0; | |
12412 | break; | |
12413 | } | |
12414 | ||
12415 | if (p) | |
12416 | { | |
12417 | rtx use_insn | |
38a448ca RH |
12418 | = emit_insn_before (gen_rtx_USE (VOIDmode, |
12419 | piece), | |
28f6d3af RK |
12420 | p); |
12421 | REG_NOTES (use_insn) | |
38a448ca RH |
12422 | = gen_rtx_EXPR_LIST (REG_DEAD, piece, |
12423 | REG_NOTES (use_insn)); | |
28f6d3af | 12424 | } |
230d793d | 12425 | |
5089e22e | 12426 | all_used = 0; |
230d793d RS |
12427 | } |
12428 | ||
a394b17b JW |
12429 | /* Check for the case where the register dying partially |
12430 | overlaps the register set by this insn. */ | |
12431 | if (all_used) | |
12432 | for (i = regno; i < endregno; i++) | |
12433 | if (dead_or_set_regno_p (place, i)) | |
663522cb KH |
12434 | { |
12435 | all_used = 0; | |
12436 | break; | |
12437 | } | |
a394b17b | 12438 | |
230d793d RS |
12439 | if (! all_used) |
12440 | { | |
12441 | /* Put only REG_DEAD notes for pieces that are | |
12442 | still used and that are not already dead or set. */ | |
12443 | ||
12444 | for (i = regno; i < endregno; i++) | |
12445 | { | |
38a448ca | 12446 | rtx piece = gen_rtx_REG (reg_raw_mode[i], i); |
230d793d | 12447 | |
17cbf358 JW |
12448 | if ((reg_referenced_p (piece, PATTERN (place)) |
12449 | || (GET_CODE (place) == CALL_INSN | |
12450 | && find_reg_fusage (place, USE, piece))) | |
230d793d RS |
12451 | && ! dead_or_set_p (place, piece) |
12452 | && ! reg_bitfield_target_p (piece, | |
12453 | PATTERN (place))) | |
38a448ca | 12454 | REG_NOTES (place) |
c5c76735 JL |
12455 | = gen_rtx_EXPR_LIST (REG_DEAD, piece, |
12456 | REG_NOTES (place)); | |
230d793d RS |
12457 | } |
12458 | ||
12459 | place = 0; | |
12460 | } | |
12461 | } | |
12462 | } | |
12463 | break; | |
12464 | ||
12465 | default: | |
12466 | /* Any other notes should not be present at this point in the | |
12467 | compilation. */ | |
12468 | abort (); | |
12469 | } | |
12470 | ||
12471 | if (place) | |
12472 | { | |
12473 | XEXP (note, 1) = REG_NOTES (place); | |
12474 | REG_NOTES (place) = note; | |
12475 | } | |
1a26b032 RK |
12476 | else if ((REG_NOTE_KIND (note) == REG_DEAD |
12477 | || REG_NOTE_KIND (note) == REG_UNUSED) | |
12478 | && GET_CODE (XEXP (note, 0)) == REG) | |
b1f21e0a | 12479 | REG_N_DEATHS (REGNO (XEXP (note, 0)))--; |
230d793d RS |
12480 | |
12481 | if (place2) | |
1a26b032 RK |
12482 | { |
12483 | if ((REG_NOTE_KIND (note) == REG_DEAD | |
12484 | || REG_NOTE_KIND (note) == REG_UNUSED) | |
12485 | && GET_CODE (XEXP (note, 0)) == REG) | |
b1f21e0a | 12486 | REG_N_DEATHS (REGNO (XEXP (note, 0)))++; |
1a26b032 | 12487 | |
38a448ca RH |
12488 | REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note), |
12489 | REG_NOTE_KIND (note), | |
12490 | XEXP (note, 0), | |
12491 | REG_NOTES (place2)); | |
1a26b032 | 12492 | } |
230d793d RS |
12493 | } |
12494 | } | |
12495 | \f | |
12496 | /* Similarly to above, distribute the LOG_LINKS that used to be present on | |
5089e22e RS |
12497 | I3, I2, and I1 to new locations. This is also called in one case to |
12498 | add a link pointing at I3 when I3's destination is changed. */ | |
230d793d RS |
12499 | |
12500 | static void | |
12501 | distribute_links (links) | |
12502 | rtx links; | |
12503 | { | |
12504 | rtx link, next_link; | |
12505 | ||
12506 | for (link = links; link; link = next_link) | |
12507 | { | |
12508 | rtx place = 0; | |
12509 | rtx insn; | |
12510 | rtx set, reg; | |
12511 | ||
12512 | next_link = XEXP (link, 1); | |
12513 | ||
12514 | /* If the insn that this link points to is a NOTE or isn't a single | |
12515 | set, ignore it. In the latter case, it isn't clear what we | |
663522cb | 12516 | can do other than ignore the link, since we can't tell which |
230d793d RS |
12517 | register it was for. Such links wouldn't be used by combine |
12518 | anyway. | |
12519 | ||
12520 | It is not possible for the destination of the target of the link to | |
12521 | have been changed by combine. The only potential of this is if we | |
12522 | replace I3, I2, and I1 by I3 and I2. But in that case the | |
12523 | destination of I2 also remains unchanged. */ | |
12524 | ||
12525 | if (GET_CODE (XEXP (link, 0)) == NOTE | |
12526 | || (set = single_set (XEXP (link, 0))) == 0) | |
12527 | continue; | |
12528 | ||
12529 | reg = SET_DEST (set); | |
12530 | while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT | |
12531 | || GET_CODE (reg) == SIGN_EXTRACT | |
12532 | || GET_CODE (reg) == STRICT_LOW_PART) | |
12533 | reg = XEXP (reg, 0); | |
12534 | ||
12535 | /* A LOG_LINK is defined as being placed on the first insn that uses | |
12536 | a register and points to the insn that sets the register. Start | |
12537 | searching at the next insn after the target of the link and stop | |
12538 | when we reach a set of the register or the end of the basic block. | |
12539 | ||
12540 | Note that this correctly handles the link that used to point from | |
5089e22e | 12541 | I3 to I2. Also note that not much searching is typically done here |
230d793d RS |
12542 | since most links don't point very far away. */ |
12543 | ||
12544 | for (insn = NEXT_INSN (XEXP (link, 0)); | |
0d4d42c3 | 12545 | (insn && (this_basic_block == n_basic_blocks - 1 |
3b413743 | 12546 | || BLOCK_HEAD (this_basic_block + 1) != insn)); |
230d793d RS |
12547 | insn = NEXT_INSN (insn)) |
12548 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i' | |
12549 | && reg_overlap_mentioned_p (reg, PATTERN (insn))) | |
12550 | { | |
12551 | if (reg_referenced_p (reg, PATTERN (insn))) | |
12552 | place = insn; | |
12553 | break; | |
12554 | } | |
6e2d1486 | 12555 | else if (GET_CODE (insn) == CALL_INSN |
663522cb | 12556 | && find_reg_fusage (insn, USE, reg)) |
6e2d1486 RK |
12557 | { |
12558 | place = insn; | |
12559 | break; | |
12560 | } | |
230d793d RS |
12561 | |
12562 | /* If we found a place to put the link, place it there unless there | |
12563 | is already a link to the same insn as LINK at that point. */ | |
12564 | ||
12565 | if (place) | |
12566 | { | |
12567 | rtx link2; | |
12568 | ||
12569 | for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1)) | |
12570 | if (XEXP (link2, 0) == XEXP (link, 0)) | |
12571 | break; | |
12572 | ||
12573 | if (link2 == 0) | |
12574 | { | |
12575 | XEXP (link, 1) = LOG_LINKS (place); | |
12576 | LOG_LINKS (place) = link; | |
abe6e52f RK |
12577 | |
12578 | /* Set added_links_insn to the earliest insn we added a | |
12579 | link to. */ | |
663522cb | 12580 | if (added_links_insn == 0 |
abe6e52f RK |
12581 | || INSN_CUID (added_links_insn) > INSN_CUID (place)) |
12582 | added_links_insn = place; | |
230d793d RS |
12583 | } |
12584 | } | |
12585 | } | |
12586 | } | |
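/* A minimal sketch (not part of combine.c) of the placement rule
   above: a LOG_LINK belongs on the first insn after the setter that
   uses the register, and the search stops at any new set of it.  The
   arrays below are hypothetical stand-ins for the insn stream.  */

static int
toy_find_link_target (setter, n_insns, uses_reg, sets_reg)
     int setter, n_insns;
     const int *uses_reg, *sets_reg;
{
  int i;

  for (i = setter + 1; i < n_insns; i++)
    {
      if (uses_reg[i])
        return i;               /* First user: place the link here.  */
      if (sets_reg[i])
        return -1;              /* Register set again first: no user.  */
    }

  return -1;                    /* Fell off the end of the block.  */
}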
12587 | \f | |
1427d6d2 RK |
12588 | /* Compute INSN_CUID for INSN, which is an insn made by combine. */ |
12589 | ||
12590 | static int | |
12591 | insn_cuid (insn) | |
12592 | rtx insn; | |
12593 | { | |
12594 | while (insn != 0 && INSN_UID (insn) > max_uid_cuid | |
12595 | && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE) | |
12596 | insn = NEXT_INSN (insn); | |
12597 | ||
12598 | if (INSN_UID (insn) > max_uid_cuid) | |
12599 | abort (); | |
12600 | ||
12601 | return INSN_CUID (insn); | |
12602 | } | |
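/* For example: a (use ...) insn emitted by combine has no cuid of its
   own (its uid exceeds max_uid_cuid), so the loop above borrows the
   cuid of the next real insn that follows it.  */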
12603 | \f | |
230d793d RS |
12604 | void |
12605 | dump_combine_stats (file) | |
12606 | FILE *file; | |
12607 | { | |
ab87f8c8 | 12608 | fnotice |
230d793d RS |
12609 | (file, |
12610 | ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n", | |
12611 | combine_attempts, combine_merges, combine_extras, combine_successes); | |
12612 | } | |
12613 | ||
12614 | void | |
12615 | dump_combine_total_stats (file) | |
12616 | FILE *file; | |
12617 | { | |
ab87f8c8 | 12618 | fnotice |
230d793d RS |
12619 | (file, |
12620 | "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n", | |
12621 | total_attempts, total_merges, total_extras, total_successes); | |
12622 | } |